| repo_name (string, lengths 5-92) | path (string, lengths 4-221) | copies (19 classes) | size (string, lengths 4-6) | content (string, lengths 766-896k) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 32-997) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5-13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
gordielachance/plugin.audio.subsonic | main.py | 1 | 46940 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Module: main
# Author: G.Breant
# Created on: 14 January 2017
# License: GPL v.3 https://www.gnu.org/copyleft/gpl.html
import sys
import os
import xbmc
import xbmcvfs
import xbmcaddon
import xbmcplugin
import xbmcgui
import json
import shutil
import dateutil.parser
from datetime import datetime
from collections import namedtuple # MutableMapping was unused and now lives in collections.abc
# Add the /lib folder to sys
sys.path.append(xbmcvfs.translatePath(os.path.join(xbmcaddon.Addon("plugin.audio.subsonic").getAddonInfo("path"), "lib")))
import libsonic # removed libsonic_extra
from simpleplugin import Plugin
from simpleplugin import Addon
# Create plugin instance
plugin = Plugin()
# initialize_gettext
#_ = plugin.initialize_gettext()
connection = None
cachetime = int(Addon().get_setting('cachetime'))
local_starred = set()
ListContext = namedtuple('ListContext', ['listing', 'succeeded','update_listing', 'cache_to_disk','sort_methods', 'view_mode','content', 'category'])
PlayContext = namedtuple('PlayContext', ['path', 'play_item', 'succeeded'])
def popup(text, time=5000, image=None):
title = plugin.addon.getAddonInfo('name')
icon = plugin.addon.getAddonInfo('icon')
xbmc.executebuiltin('Notification(%s, %s, %d, %s)' % (title, text,
time, icon))
def get_connection():
global connection
if connection is None:
connected = False
# Create connection
try:
connection = libsonic.Connection(
baseUrl=Addon().get_setting('subsonic_url'),
username=Addon().get_setting('username', convert=False),
password=Addon().get_setting('password', convert=False),
port=Addon().get_setting('port'),
apiVersion=Addon().get_setting('apiversion'),
insecure=Addon().get_setting('insecure'),
legacyAuth=Addon().get_setting('legacyauth'),
useGET=Addon().get_setting('useget'),
)
connected = connection.ping()
except:
pass
if not connected:
popup('Connection error')
return False
return connection
@plugin.action()
def root(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
menus = {
'folders': {
'name': Addon().get_localized_string(30038),
'callback': 'browse_folders',
'thumb': None
},
'library': {
'name': Addon().get_localized_string(30019),
'callback': 'browse_library',
'thumb': None
},
'albums': {
'name': Addon().get_localized_string(30020),
'callback': 'menu_albums',
'thumb': None
},
'tracks': {
'name': Addon().get_localized_string(30021),
'callback': 'menu_tracks',
'thumb': None
},
'playlists': {
'name': Addon().get_localized_string(30022),
'callback': 'list_playlists',
'thumb': None
},
'search': {
'name': Addon().get_localized_string(30039),
'callback': 'search',
'thumb': None
},
}
# Iterate through categories
for mid in menus:
# image
if 'thumb' in menus[mid]:
thumb = menus[mid]['thumb']
listing.append({
'label': menus[mid]['name'],
'thumb': thumb, # Item thumbnail
'fanart': thumb, # Item thumbnail
'url': plugin.get_url(
action=menus[mid]['callback'],
menu_id=mid
)
}) # Item label
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
sort_methods = None, #the list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
def menu_albums(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
menus = {
'albums_newest': {
'name': Addon().get_localized_string(30023),
'thumb': None,
'args': {"ltype": "newest"}
},
'albums_frequent': {
'name': Addon().get_localized_string(30024),
'thumb': None,
'args': {"ltype": "frequent"}
},
'albums_recent': {
'name': Addon().get_localized_string(30025),
'thumb': None,
'args': {"ltype": "recent"}
},
'albums_random': {
'name': Addon().get_localized_string(30026),
'thumb': None,
'args': {"ltype": "random"}
}
}
# Iterate through categories
for menu_id in menus:
menu = menus.get(menu_id)
# image
if 'thumb' in menu:
thumb = menu.get('thumb')
listing.append({
'label': menu.get('name'),
'thumb': menu.get('thumb'), # Item thumbnail
'fanart': menu.get('thumb'), # Item thumbnail
'url': plugin.get_url(
action= 'list_albums',
page= 1,
query_args= json.dumps(menu.get('args')),
menu_id= menu_id
)
}) # Item label
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
#sort_methods = None, #the list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
def menu_tracks(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
menus = {
'tracks_starred': {
'name': Addon().get_localized_string(30036),
'thumb': None
},
'tracks_random': {
'name': Addon().get_localized_string(30037),
'thumb': None
}
}
# Iterate through categories
for menu_id in menus:
menu = menus.get(menu_id)
# image
if 'thumb' in menu:
thumb = menu.get('thumb')
listing.append({
'label': menu.get('name'),
'thumb': menu.get('thumb'), # Item thumbnail
'fanart': menu.get('thumb'), # Item thumbnail
'url': plugin.get_url(
action= 'list_tracks',
menu_id= menu_id
)
}) # Item label
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
#sort_methods = None, #the list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def browse_folders(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
# Get items
items = walk_folders()
# Iterate through items
for item in items:
entry = {
'label': item.get('name'),
'url': plugin.get_url(
action= 'browse_indexes',
folder_id= item.get('id'),
menu_id= params.get('menu_id')
)
}
listing.append(entry)
if len(listing) == 1:
plugin.log('Single media folder found; returning listing from browse_indexes()...')
return browse_indexes(params)
else:
add_directory_items(create_listing(listing))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def browse_indexes(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
# Get items
# optional folder ID
folder_id = params.get('folder_id')
items = walk_index(folder_id)
# Iterate through items
for item in items:
entry = {
'label': item.get('name'),
'url': plugin.get_url(
action= 'list_directory',
id= item.get('id'),
menu_id= params.get('menu_id')
)
}
listing.append(entry)
add_directory_items(create_listing(
listing
))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def list_directory(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
# Get items
id = params.get('id')
items = walk_directory(id)
# Iterate through items
for item in items:
#is a directory
if (item.get('isDir')==True):
entry = {
'label': item.get('title'),
'url': plugin.get_url(
action= 'list_directory',
id= item.get('id'),
menu_id= params.get('menu_id')
)
}
else:
entry = get_entry_track(item,params)
listing.append(entry)
add_directory_items(create_listing(
listing
))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def browse_library(params):
"""
List artists from the library (ID3 tags)
"""
# get connection
connection = get_connection()
if not connection:
return
listing = []
# Get items
items = walk_artists()
# Iterate through items
for item in items:
entry = get_entry_artist(item,params)
#context menu actions
context_actions = []
if can_star('artist',item.get('id')):
action_star = context_action_star('artist',item.get('id'))
context_actions.append(action_star)
if len(context_actions) > 0:
entry['context_menu'] = context_actions
listing.append(entry)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
cache_to_disk = True, #cache this view to disk.
sort_methods = get_sort_methods('artists',params), #the list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
content = 'artists' #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def list_albums(params):
"""
List albums from the library (ID3 tags)
"""
listing = []
# get connection
connection = get_connection()
if not connection:
return
#query
query_args = {}
try:
query_args_json = params['query_args']
query_args = json.loads(query_args_json)
except:
pass
#size
albums_per_page = int(Addon().get_setting('albums_per_page'))
query_args["size"] = albums_per_page
#offset
offset = int(params.get('page',1)) - 1
if offset > 0:
query_args["offset"] = offset * albums_per_page
#debug
query_args_json = json.dumps(query_args)
plugin.log('list_albums with args:' + query_args_json)
#Get items
if 'artist_id' in params:
generator = walk_artist(params.get('artist_id'))
else:
generator = walk_albums(**query_args)
#make a list out of the generator so we can iterate it several times
items = list(generator)
#check if there is only one artist for this album (and then hide it)
artists = [item.get('artist',None) for item in items]
if len(artists) <= 1:
params['hide_artist'] = True
# Iterate through items
for item in items:
album = get_entry_album(item, params)
listing.append(album)
# Root menu
link_root = navigate_root()
listing.append(link_root)
if not 'artist_id' in params:
# Pagination if we've not reached the end of the list
# if type(items) != type(True): TO FIX
link_next = navigate_next(params)
listing.append(link_next)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
cache_to_disk = True, #cache this view to disk.
sort_methods = get_sort_methods('albums',params),
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
content = 'albums' #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def list_tracks(params):
menu_id = params.get('menu_id')
listing = []
#query
query_args = {}
try:
query_args_json = params['query_args']
query_args = json.loads(query_args_json)
except:
pass
#size
tracks_per_page = int(Addon().get_setting('tracks_per_page'))
query_args["size"] = tracks_per_page
#offset
offset = int(params.get('page',1)) - 1
if offset > 0:
query_args["offset"] = offset * tracks_per_page
#debug
query_args_json = json.dumps(query_args)
plugin.log('list_tracks with args:' + query_args_json)
# get connection
connection = get_connection()
if not connection:
return
# Album
if 'album_id' in params:
generator = walk_album(params['album_id'])
# Playlist
elif 'playlist_id' in params:
generator = walk_playlist(params['playlist_id'])
#TO FIX
#tracknumber = 0
#for item in items:
# tracknumber += 1
# items[item]['tracknumber'] = tracknumber
# Starred
elif menu_id == 'tracks_starred':
generator = walk_tracks_starred()
# Random
elif menu_id == 'tracks_random':
generator = walk_tracks_random(**query_args)
# Filters
#else:
#TO WORK
#make a list out of the generator so we can iterate it several times
items = list(generator)
#check if there is only one artist for this album (and then hide it)
artists = [item.get('artist',None) for item in items]
if len(artists) <= 1:
params['hide_artist'] = True
#update stars
if menu_id == 'tracks_starred':
ids_list = [item.get('id') for item in items]
stars_cache_update(ids_list)
# Iterate through items
key = 0
for item in items:
track = get_entry_track(item,params)
listing.append(track)
key += 1
# Root menu
#link_root = navigate_root()
#listing.append(link_root)
# Pagination if we've not reached the end of the list
# if type(items) != type(True): TO FIX
#link_next = navigate_next(params)
#listing.append(link_next)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
sort_methods= get_sort_methods('tracks',params),
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
content = 'songs' #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
#the stars (persistent) cache is used to know what context action (star/unstar) we should display.
#run this function every time we get starred items.
#ids can be a single ID or a list
#using a set makes sure that IDs will be unique.
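#Hypothetical usage sketch (illustrative values; these exact calls are not part of the add-on flow):
# stars_cache_update('123') #star a single ID
# stars_cache_update(['4', '5'], True) #unstar a list of IDs
# is_starred('123') #-> True once '123' has been cached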
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def list_playlists(params):
# get connection
connection = get_connection()
if not connection:
return
listing = []
# Get items
items = walk_playlists()
# Iterate through items
for item in items:
entry = get_entry_playlist(item,params)
listing.append(entry)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
sort_methods = get_sort_methods('playlists',params), #the list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def search(params):
dialog = xbmcgui.Dialog()
d = dialog.input(Addon().get_localized_string(30039), type=xbmcgui.INPUT_ALPHANUM)
if not d:
d = " "
# get connection
connection = get_connection()
if not connection:
return
listing = []
# Get items
items = connection.search2(query=d)
# Iterate through items
for item in items.get('searchResult2').get('song'):
entry = get_entry_track( item, params)
listing.append(entry)
if len(listing) == 1:
plugin.log('Single result found; returning listing from browse_indexes()...')
return browse_indexes(params)
else:
add_directory_items(create_listing(listing))
@plugin.action()
def play_track(params):
id = params['id']
plugin.log('play_track #' + id)
# get connection
connection = get_connection()
if not connection:
return
url = connection.streamUrl(sid=id,
maxBitRate=Addon().get_setting('bitrate_streaming'),
tformat=Addon().get_setting('transcode_format_streaming')
)
#return url
_set_resolved_url(resolve_url(url))
@plugin.action()
def star_item(params):
ids = params.get('ids') #can be a single ID or a list of IDs
unstar = params.get('unstar',False)
unstar = unstar and (unstar != 'None') and (unstar != 'False') #TO FIX better statement ?
type = params.get('type')
sids = albumIds = artistIds = None
#validate type
if type == 'track':
sids = ids
elif type == 'artist':
artistIds = ids
elif type == 'album':
albumIds = ids
#validate capability
if not can_star(type,ids):
return
#validate IDs
if not sids and not artistIds and not albumIds:
return
# get connection
connection = get_connection()
if not connection:
return
###
did_action = False
try:
if unstar:
request = connection.unstar(sids, albumIds, artistIds)
else:
request = connection.star(sids, albumIds, artistIds)
if request['status'] == 'ok':
did_action = True
except:
pass
if did_action:
if unstar:
message = Addon().get_localized_string(30031)
plugin.log('Unstarred %s #%s' % (type,json.dumps(ids)))
else: #star
message = Addon().get_localized_string(30032)
plugin.log('Starred %s #%s' % (type,json.dumps(ids)))
stars_cache_update(ids,unstar)
popup(message)
#TO FIX clear starred lists caches ?
#TO FIX refresh current list after star set ?
else:
if unstar:
plugin.log_error('Unable to unstar %s #%s' % (type,json.dumps(ids)))
else:
plugin.log_error('Unable to star %s #%s' % (type,json.dumps(ids)))
#return did_action
return
@plugin.action()
def download_item(params):
id = params.get('id') #can be a single ID or a list of IDs
type = params.get('type')
#validate path
download_folder = Addon().get_setting('download_folder')
if not download_folder:
popup("Please set a directory for your downloads")
plugin.log_error("No directory set for downloads")
#validate capability
if not can_download(type,id):
return
if type == 'track':
did_action = download_tracks(id)
elif type == 'album':
did_action = download_album(id)
if did_action:
plugin.log('Downloaded %s #%s' % (type,id))
popup('Item has been downloaded!')
else:
plugin.log_error('Unable to download %s #%s' % (type,id))
return did_action
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_playlist(item,params):
image = connection.getCoverArtUrl(item.get('coverArt'))
return {
'label': item.get('name'),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'list_tracks',
playlist_id= item.get('id'),
menu_id= params.get('menu_id')
),
'info': {'music': { ##http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'title': item.get('name'),
'count': item.get('songCount'),
'duration': item.get('duration'),
'date': convert_date_from_iso8601(item.get('created'))
}}
}
#star (or unstar) an item
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_artist(item,params):
image = connection.getCoverArtUrl(item.get('coverArt'))
return {
'label': get_starred_label(item.get('id'),item.get('name')),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'list_albums',
artist_id= item.get('id'),
menu_id= params.get('menu_id')
),
'info': {
'music': { ##http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'count': item.get('albumCount'),
'artist': item.get('name')
}
}
}
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_album(item, params):
image = connection.getCoverArtUrl(item.get('coverArt'))
entry = {
'label': get_entry_album_label(item,params.get('hide_artist',False)),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'list_tracks',
album_id= item.get('id'),
hide_artist= item.get('hide_artist'),
menu_id= params.get('menu_id')
),
'info': {
'music': { ##http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'count': item.get('songCount'),
'date': convert_date_from_iso8601(item.get('created')), #date added
'duration': item.get('duration'),
'artist': item.get('artist'),
'album': item.get('name'),
'year': item.get('year')
}
}
}
#context menu actions
context_actions = []
if can_star('album',item.get('id')):
action_star = context_action_star('album',item.get('id'))
context_actions.append(action_star)
if can_download('album',item.get('id')):
action_download = context_action_download('album',item.get('id'))
context_actions.append(action_download)
if len(context_actions) > 0:
entry['context_menu'] = context_actions
return entry
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_track(item,params):
menu_id = params.get('menu_id')
image = connection.getCoverArtUrl(item.get('coverArt'))
entry = {
'label': get_entry_track_label(item,params.get('hide_artist')),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'play_track',
id= item.get('id'),
menu_id= menu_id
),
'is_playable': True,
'mime': item.get("contentType"),
'info': {'music': { #http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'title': item.get('title'),
'album': item.get('album'),
'artist': item.get('artist'),
'tracknumber': item.get('tracknumber'),
'year': item.get('year'),
'genre': item.get('genre'),
'size': item.get('size'),
'duration': item.get('duration'),
'date': item.get('created')
}
}
}
#context menu actions
context_actions = []
if can_star('track',item.get('id')):
action_star = context_action_star('track',item.get('id'))
context_actions.append(action_star)
if can_download('track',item.get('id')):
action_download = context_action_download('track',item.get('id'))
context_actions.append(action_download)
if len(context_actions) > 0:
entry['context_menu'] = context_actions
return entry
#@plugin.cached(cachetime) #cache (in minutes)
def get_starred_label(id,label):
if is_starred(id):
label = '[COLOR=FF00FF00]%s[/COLOR]' % label
return label
def is_starred(id):
starred = stars_cache_get()
id = int(id)
if id in starred:
return True
else:
return False
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_track_label(item,hide_artist = False):
if hide_artist:
label = item.get('title', '<Unknown>')
else:
label = '%s - %s' % (
item.get('artist', '<Unknown>'),
item.get('title', '<Unknown>')
)
return get_starred_label(item.get('id'),label)
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_album_label(item,hide_artist = False):
if hide_artist:
label = item.get('name', '<Unknown>')
else:
label = '%s - %s' % (item.get('artist', '<Unknown>'),
item.get('name', '<Unknown>'))
return get_starred_label(item.get('id'),label)
#@plugin.cached(cachetime) #cache (in minutes)
def get_sort_methods(type,params):
#sort method for list types
#https://github.com/xbmc/xbmc/blob/master/xbmc/SortFileItem.h
#TO FIX _DATE or _DATEADDED ?
#TO FIX
#actually it seems possible to 'restore' the default sorting (by labels)
#so our starred items don't get colorized.
#so do not sort stuff
#see http://forum.kodi.tv/showthread.php?tid=293037
return [] #intentionally bail out with no sort methods (see comment above); the code below is kept for reference but is currently unreachable
sortable = [
xbmcplugin.SORT_METHOD_NONE,
xbmcplugin.SORT_METHOD_LABEL,
xbmcplugin.SORT_METHOD_UNSORTED
]
if type=='artists':
artists = [
xbmcplugin.SORT_METHOD_ARTIST
]
sortable = sortable + artists
elif type=='albums':
albums = [
xbmcplugin.SORT_METHOD_ALBUM,
xbmcplugin.SORT_METHOD_DURATION,
xbmcplugin.SORT_METHOD_DATE,
#xbmcplugin.SORT_METHOD_YEAR
]
if not params.get('hide_artist',False):
albums.append(xbmcplugin.SORT_METHOD_ARTIST)
sortable = sortable + albums
elif type=='tracks':
tracks = [
xbmcplugin.SORT_METHOD_TITLE,
xbmcplugin.SORT_METHOD_ALBUM,
xbmcplugin.SORT_METHOD_TRACKNUM,
#xbmcplugin.SORT_METHOD_YEAR,
xbmcplugin.SORT_METHOD_GENRE,
xbmcplugin.SORT_METHOD_SIZE,
xbmcplugin.SORT_METHOD_DURATION,
xbmcplugin.SORT_METHOD_DATE,
xbmcplugin.SORT_METHOD_BITRATE
]
if not params.get('hide_artist',False):
tracks.append(xbmcplugin.SORT_METHOD_ARTIST)
if params.get('playlist_id',False):
tracks.append(xbmcplugin.SORT_METHOD_PLAYLIST_ORDER) #was a bare tuple expression, i.e. a no-op
sortable = sortable + tracks
elif type=='playlists':
playlists = [
xbmcplugin.SORT_METHOD_TITLE,
xbmcplugin.SORT_METHOD_DURATION,
xbmcplugin.SORT_METHOD_DATE
]
sortable = sortable + playlists
return sortable
def stars_cache_update(ids,remove=False):
#get existing cache set
starred = stars_cache_get()
#make sure this is a list
if not isinstance(ids, list):
ids = [ids]
#abort if empty
if len(ids) == 0:
return
#parse items
for item_id in ids:
item_id = int(item_id)
if not remove:
starred.add(item_id)
else:
starred.discard(item_id) #discard() rather than remove(): no KeyError if the ID was never cached
#store them
with plugin.get_storage() as storage:
storage['starred_ids'] = starred
plugin.log('stars_cache_update:')
plugin.log(starred)
def stars_cache_get(): #Retrieving stars from cache is too slow, so load to local variable
global local_starred
plugin.log(len(local_starred))
if len(local_starred) > 0:
plugin.log('stars already loaded:')
plugin.log(local_starred)
return(local_starred)
else:
with plugin.get_storage() as storage:
local_starred = storage.get('starred_ids',set())
plugin.log('stars_cache_get:')
plugin.log(local_starred)
return local_starred
def navigate_next(params):
page = int(params.get('page',1))
page += 1
title = Addon().get_localized_string(30029) + "(%d)" % page
return {
'label': title,
'url': plugin.get_url(
action= params.get('action',None),
page= page,
query_args= params.get('query_args',None)
)
}
def navigate_root():
return {
'label': Addon().get_localized_string(30030),
'url': plugin.get_url(action='root')
}
#converts a date string from e.g. '2012-04-17T19:53:44' to e.g. '17.04.2012'
def convert_date_from_iso8601(iso8601):
date_obj = dateutil.parser.parse(iso8601)
return date_obj.strftime('%d.%m.%Y')
def context_action_star(type,id):
starred = is_starred(id)
if not starred:
label = Addon().get_localized_string(30033)
else:
#Should be available only in the stars lists;
#so we don't have to fetch the starred status for each item
#(since it is not available into the XML response from the server)
label = Addon().get_localized_string(30034)
xbmc.log('Context action star returning RunPlugin(%s)' % plugin.get_url(action='star_item',type=type,ids=id,unstar=starred),xbmc.LOGDEBUG)
return (
label,
'RunPlugin(%s)' % plugin.get_url(action='star_item',type=type,ids=id,unstar=starred)
)
#the Subsonic API says this is supported for artists, tracks and albums,
#but I can see it available only for tracks on Subsonic 5.3, so disable it for the rest.
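#e.g. (hypothetical IDs): can_star('track', '123') -> True ; can_star('album', '456') -> False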
def can_star(type,ids = None):
if not ids:
return False
if not isinstance(ids, (list, tuple)): #wrap a single ID (fixes an operator-precedence bug in the old test)
ids = [ids]
if len(ids) == 0:
return False
if type == 'track':
return True
elif type == 'artist':
return False
elif type == 'album':
return False
def context_action_download(type,id):
label = Addon().get_localized_string(30035)
return (
label,
'RunPlugin(%s)' % plugin.get_url(action='download_item',type=type,id=id)
)
def can_download(type,id = None):
if id is None:
return False
if type == 'track':
return True
elif type == 'album':
return True
def download_tracks(ids):
#popup is fired before, in download_item
download_folder = Addon().get_setting('download_folder')
if not download_folder:
return
if not ids:
return False
#make list
if not isinstance(ids, (list, tuple)): #wrap a single ID (fixes an operator-precedence bug in the old test)
ids = [ids]
ids_count = len(ids)
#check if empty
if ids_count == 0:
return False
plugin.log('download_tracks IDs:')
plugin.log(json.dumps(ids))
# get connection
connection = get_connection()
if not connection:
return
#progress...
pc_step = 100 // ids_count #integer division: DialogProgress.update() expects an int percentage
pc_progress = 0
ids_parsed = 0
progressdialog = xbmcgui.DialogProgress()
progressdialog.create("Downloading tracks...") #Title
for id in ids:
if progressdialog.iscanceled():
return False
# debug
plugin.log('Trying to download track #'+str(id))
# get track infos
response = connection.getSong(id)
track = response.get('song')
plugin.log('Track info :')
plugin.log(track)
# progress bar
pc_progress = ids_parsed * pc_step
progressdialog.update(pc_progress, 'Getting track information...',get_entry_track_label(track))
track_path_relative = track.get("path", None) # e.g. 'Radiohead/Kid A/Idioteque.mp3' (kept as str: Kodi 19+ runs Python 3, where bytes would break os.path.join)
track_path = os.path.join(download_folder, track_path_relative) # 'C:/users/.../Radiohead/Kid A/Idioteque.mp3'
track_directory = os.path.dirname(os.path.abspath(track_path)) # 'C:/users/.../Radiohead/Kid A'
#check if file exists
if os.path.isfile(track_path):
progressdialog.update(pc_progress, 'Track has already been downloaded!')
plugin.log("File '%s' already exists" % (id))
else:
progressdialog.update(pc_progress, "Downloading track...",track_path)
try:
#get remote file (file-object like)
file_obj = connection.download(id)
#create directory if it does not exist
if not os.path.exists(track_directory):
os.makedirs(track_directory)
#create blank file
file = open(track_path, 'ab') #binary mode ('ab'): text mode would corrupt audio data; append keeps an existing file intact
#fill blank file
shutil.copyfileobj(file_obj, file)
file.close()
except:
popup("Error while downloading track #%s" % (id))
plugin.log("Error while downloading track #%s" % (id))
pass
ids_parsed += 1
progressdialog.update(100, "Done !","Enjoy !")
xbmc.sleep(1000)
progressdialog.close()
def download_album(id):
# get connection
connection = get_connection()
if not connection:
return
# get album infos
response = connection.getAlbum(id)
album = response.get('album')
tracks = album.get('song')
plugin.log('getAlbum:')
plugin.log(json.dumps(album))
ids = [] #list of track IDs
for track in tracks:
track_id = track.get('id')
ids.append(track_id)
download_tracks(ids)
#@plugin.cached(cachetime) #cache (in minutes)
def create_listing(listing, succeeded=True, update_listing=False, cache_to_disk=False, sort_methods=None,view_mode=None, content=None, category=None):
return ListContext(listing, succeeded, update_listing, cache_to_disk,sort_methods, view_mode, content, category)
def resolve_url(path='', play_item=None, succeeded=True):
"""
Create and return a context dict to resolve a playable URL
:param path: the path to a playable media.
:type path: str or unicode
:param play_item: a dict of item properties as described in the class docstring.
It allows you to set additional properties for the item being played, like graphics, metadata etc.
If the ``play_item`` parameter is present, the ``path`` value is ignored, and the path must be set via
the ``'path'`` property of the ``play_item`` dict.
:type play_item: dict
:param succeeded: if ``False``, Kodi won't play anything
:type succeeded: bool
:return: context object containing necessary parameters
for Kodi to play the selected media.
:rtype: PlayContext
"""
return PlayContext(path, play_item, succeeded)
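#Hypothetical usage (play_track above resolves a stream URL this way; the URL shape is illustrative):
# _set_resolved_url(resolve_url('http://server/rest/stream?id=123'))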
#@plugin.cached(cachetime) #cache (in minutes)
def create_list_item(item):
"""
Create an :class:`xbmcgui.ListItem` instance from an item dict
:param item: a dict of ListItem properties
:type item: dict
:return: ListItem instance
:rtype: xbmcgui.ListItem
"""
major_version = xbmc.getInfoLabel('System.BuildVersion')[:2]
if major_version >= '18':
list_item = xbmcgui.ListItem(label=item.get('label', ''),
label2=item.get('label2', ''),
path=item.get('path', ''),
offscreen=item.get('offscreen', False))
art = item.get('art', {})
art['thumb'] = item.get('thumb', '')
art['icon'] = item.get('icon', '')
art['fanart'] = item.get('fanart', '')
item['art'] = art
cont_look = item.get('content_lookup')
if cont_look is not None:
list_item.setContentLookup(cont_look)
if item.get('art'):
list_item.setArt(item['art'])
if item.get('stream_info'):
for stream, stream_info in item['stream_info'].items():
list_item.addStreamInfo(stream, stream_info)
if item.get('info'):
for media, info in item['info'].items():
list_item.setInfo(media, info)
if item.get('context_menu') is not None:
list_item.addContextMenuItems(item['context_menu'])
if item.get('subtitles'):
list_item.setSubtitles(item['subtitles'])
if item.get('mime'):
list_item.setMimeType(item['mime'])
if item.get('properties'):
for key, value in item['properties'].items():
list_item.setProperty(key, value)
if major_version >= '17':
cast = item.get('cast')
if cast is not None:
list_item.setCast(cast)
db_ids = item.get('online_db_ids')
if db_ids is not None:
list_item.setUniqueIDs(db_ids)
ratings = item.get('ratings')
if ratings is not None:
for rating in ratings:
list_item.setRating(**rating)
return list_item
def _set_resolved_url(context):
plugin.log_debug('Resolving URL from {0}'.format(str(context)))
if context.play_item is None:
list_item = xbmcgui.ListItem(path=context.path)
else:
list_item = create_list_item(context.play_item) #module-level function (the stray 'self.' was a NameError bug)
xbmcplugin.setResolvedUrl(plugin.handle, context.succeeded, list_item)
#@plugin.cached(cachetime) #cache (in minutes)
def add_directory_items(context):
plugin.log_debug('Creating listing from {0}'.format(str(context)))
if context.category is not None:
xbmcplugin.setPluginCategory(plugin.handle, context.category)
if context.content is not None:
xbmcplugin.setContent(plugin.handle, context.content) # This must be at the beginning
for item in context.listing:
is_folder = item.get('is_folder', True)
if item.get('list_item') is not None:
list_item = item['list_item']
else:
list_item = create_list_item(item)
if item.get('is_playable'):
list_item.setProperty('IsPlayable', 'true')
is_folder = False
xbmcplugin.addDirectoryItem(plugin.handle, item['url'], list_item, is_folder)
if context.sort_methods is not None:
if isinstance(context.sort_methods, (int, dict)):
sort_methods = [context.sort_methods]
elif isinstance(context.sort_methods, (tuple, list)):
sort_methods = context.sort_methods
else:
raise TypeError(
'sort_methods parameter must be of int, dict, tuple or list type!')
for method in sort_methods:
if isinstance(method, int):
xbmcplugin.addSortMethod(plugin.handle, method)
elif isinstance(method, dict):
xbmcplugin.addSortMethod(plugin.handle, **method)
else:
raise TypeError(
'method parameter must be of int or dict type!')
xbmcplugin.endOfDirectory(plugin.handle,
context.succeeded,
context.update_listing,
context.cache_to_disk)
if context.view_mode is not None:
xbmc.executebuiltin('Container.SetViewMode({0})'.format(context.view_mode))
def walk_index(folder_id=None):
"""
Request Subsonic's index and iterate each item.
"""
response = connection.getIndexes(folder_id)
for index in response["indexes"]["index"]:
for artist in index["artist"]:
yield artist
def walk_playlists():
"""
Request Subsonic's playlists and iterate over each item.
"""
response = connection.getPlaylists()
for child in response["playlists"]["playlist"]:
yield child
def walk_playlist(playlist_id):
"""
Request Subsonic's playlist items and iterate over each item.
"""
response = connection.getPlaylist(playlist_id)
for child in response["playlist"]["entry"]:
yield child
def walk_folders():
response = connection.getMusicFolders()
for child in response["musicFolders"]["musicFolder"]:
yield child
def walk_directory(directory_id):
"""
Request a Subsonic music directory and iterate over each item.
"""
response = connection.getMusicDirectory(directory_id)
try:
for child in response["directory"]["child"]:
if child.get("isDir"):
for child in walk_directory(child["id"]):
yield child
else:
yield child
except KeyError: #directory without children
yield from ()
def walk_artist(artist_id):
"""
Request a Subsonic artist and iterate over each album.
"""
response = connection.getArtist(artist_id)
for child in response["artist"]["album"]:
yield child
def walk_artists():
"""
(ID3 tags)
Request all artists and iterate over each item.
"""
response = connection.getArtists()
for index in response["artists"]["index"]:
for artist in index["artist"]:
yield artist
def walk_genres():
"""
(ID3 tags)
Request all genres and iterate over each item.
"""
response = connection.getGenres()
for genre in response["genres"]["genre"]:
yield genre
def walk_albums(ltype, size=None, fromYear=None,toYear=None, genre=None, offset=None):
"""
(ID3 tags)
Request all albums for a given genre and iterate over each album.
"""
if ltype == 'byGenre' and genre is None:
return
if ltype == 'byYear' and (fromYear is None or toYear is None):
return
response = connection.getAlbumList2(
ltype=ltype, size=size, fromYear=fromYear, toYear=toYear,genre=genre, offset=offset)
if not response["albumList2"]["album"]:
return
for album in response["albumList2"]["album"]:
yield album
def walk_album(album_id):
"""
(ID3 tags)
Request an album and iterate over each item.
"""
response = connection.getAlbum(album_id)
for song in response["album"]["song"]:
yield song
def walk_tracks_random(size=None, genre=None, fromYear=None,toYear=None):
"""
Request random songs by genre and/or year and iterate over each song.
"""
response = connection.getRandomSongs(
size=size, genre=genre, fromYear=fromYear, toYear=toYear)
for song in response["randomSongs"]["song"]:
yield song
def walk_tracks_starred():
"""
Request Subsonic's starred songs and iterate over each item.
"""
response = connection.getStarred()
for song in response["starred"]["song"]:
yield song
# Start plugin from within Kodi.
if __name__ == "__main__":
# Map actions
# Note that we map callable objects without brackets ()
plugin.run()
| mit | 1,061,290,670,147,951,900 | 29.268734 | 151 | 0.568871 | false | 3.938141 | false | false | false |
WilJoey/tn_ckan | ckan/lib/field_types.py | 1 | 12439 |
import re
import time
import datetime
import warnings
with warnings.catch_warnings():
warnings.filterwarnings('ignore', '.*compile_mappers.*')
import formalchemy
from ckan.common import OrderedDict
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
class DateConvertError(Exception):
pass
class DateType(object):
'''Utils for handling dates in forms.
* Full or partial dates
* User inputs in form DD/MM/YYYY and it is stored in db as YYYY-MM-DD.
'''
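# Example round-trip, using the values from the method docstrings below:
#   DateType.form_to_db('27/2/2005')  # -> u'2005-02-27'
#   DateType.db_to_form('2005-02-27') # -> '27/2/2005'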
format_types = ('form', 'db')
datetime_fields = OrderedDict([('year', (1000, 2100, 4, 'YYYY')),
('month', (1, 12, 2, 'MM')),
('day', (1, 31, 2, 'DD')),
('hour', (0, 23, 2, 'HH')),
('minute', (0, 59, 2, 'MM')),
])
datetime_fields_indexes = {'min':0, 'max':1, 'digits':2, 'format_code':3}
date_fields_order = {'db':('year', 'month', 'day'),
'form':('day', 'month', 'year')}
parsing_separators = {'date':'-/',
'time':':\.'}
default_separators = {'db':{'date':'-',
'time':':'},
'form':{'date':'/',
'time':':'},}
field_code_map = {'year':'YYYY', 'month':'MM', 'day':'DD',
'hour':'HH', 'minute':'MM'}
word_match = re.compile('[A-Za-z]+')
timezone_match = re.compile('(\s[A-Z]{3})|(\s[+-]\d\d:?\d\d)')
months_abbreviated = [month[:3] for month in months]
@classmethod
def parse_timedate(cls, timedate_str, format_type):
'''Takes a timedate and returns a dictionary of the fields.
* Little validation is done.
* If it can\'t understand the layout it raises DateConvertError
'''
assert format_type in cls.format_types
if not hasattr(cls, 'matchers'):
# build up a list of re matches for the different
# acceptable ways of expressing the time and date
cls.matchers = {}
cls.readable_formats = {}
for format_type_ in cls.format_types:
finished_regexps = []
readable_formats = [] # analogous to the regexps,
# but human readable
year_re = '(?P<%s>\d{2,4})'
month_re = '(?P<%s>\w+)'
two_digit_decimal_re = '(?P<%s>\d{1,2})'
time_re = '%s[%s]%s' % (
two_digit_decimal_re % 'hour',
cls.parsing_separators['time'],
two_digit_decimal_re % 'minute')
time_readable = '%s%s%s' % (
cls.datetime_fields['hour'][cls.datetime_fields_indexes['format_code']],
cls.default_separators[format_type_]['time'],
cls.datetime_fields['minute'][cls.datetime_fields_indexes['format_code']])
date_field_re = {'year':year_re % 'year',
'month':month_re % 'month',
'day':two_digit_decimal_re % 'day'}
date_fields = list(cls.date_fields_order[format_type_])
for how_specific in ('day', 'month', 'year'):
date_sep_re = '[%s]' % cls.parsing_separators['date']
date_sep_readable = cls.default_separators[format_type_]['date']
date_field_regexps = [date_field_re[field] for field in date_fields]
date_field_readable = [cls.datetime_fields[field][cls.datetime_fields_indexes['format_code']] for field in date_fields]
date_re = date_sep_re.join(date_field_regexps)
date_readable = date_sep_readable.join(date_field_readable)
finished_regexps.append(date_re)
readable_formats.append(date_readable)
date_fields.remove(how_specific)
full_date_re = finished_regexps[0]
full_date_readable = readable_formats[0]
# Allow time to be before or after the date
for format_ in ('%(time_re)s%(sep)s%(full_date_re)s',
'%(full_date_re)s%(sep)s%(time_re)s'):
finished_regexps.insert(0, format_ % {
'time_re':time_re,
'sep':'\s',
'full_date_re':full_date_re})
readable_formats.insert(0, format_ % {
'time_re':time_readable,
'sep':' ',
'full_date_re':full_date_readable})
cls.matchers[format_type_] = [re.compile('^%s$' % regexp) for regexp in finished_regexps]
cls.readable_formats[format_type_] = readable_formats
#print format_type_, finished_regexps, readable_formats
for index, matcher in enumerate(cls.matchers[format_type]):
match = matcher.match(timedate_str)
if match:
timedate_dict = match.groupdict()
timedate_dict = cls.int_timedate(timedate_dict)
timedate_dict['readable_format'] = cls.readable_formats[format_type][index]
return timedate_dict
else:
acceptable_formats = ', '.join(["'%s'" % format_ for format_ in cls.readable_formats[format_type]])
raise DateConvertError("Cannot parse %s date '%s'. Acceptable formats: %s" % (format_type, timedate_str, acceptable_formats))
@classmethod
def int_timedate(cls, timedate_dict):
# Convert timedate string values to integers
int_timedate_dict = timedate_dict.copy()
for field in cls.datetime_fields.keys():
if timedate_dict.has_key(field):
val = timedate_dict[field]
if field == 'year':
if len(val) == 2:
# Deal with 2 digit dates
try:
int_val = int(val)
except ValueError:
raise DateConvertError('Expecting integer for %s value: %s' % (field, val))
val = cls.add_centurys_to_two_digit_year(int_val)
elif len(val) == 3:
raise DateConvertError('Expecting 2 or 4 digit year: "%s"' % (val))
if field == 'month':
# Deal with months expressed as words
if val in months:
val = months.index(val) + 1
if val in cls.months_abbreviated:
val = cls.months_abbreviated.index(val) + 1
try:
int_timedate_dict[field] = int(val)
except ValueError:
raise DateConvertError('Expecting integer for %s value: %s' % (field, val))
return int_timedate_dict
@classmethod
def iso_to_db(cls, iso_date, format):
# e.g. 'Wed, 06 Jan 2010 09:30:00'
# '%a, %d %b %Y %H:%M:%S'
assert isinstance(iso_date, (unicode, str))
try:
date_tuple = time.strptime(iso_date, format)
except ValueError, e:
raise DateConvertError('Could not read date as ISO format "%s". Date provided: "%s"' % (format, iso_date))
date_obj = datetime.datetime(*date_tuple[:4])
date_str = cls.date_to_db(date_obj)
return date_str
@classmethod
def strip_iso_timezone(cls, iso_date):
return cls.timezone_match.sub('', iso_date)
@classmethod
def form_to_db(cls, form_str, may_except=True):
'''
27/2/2005 -> 2005-02-27
27/Feb/2005 -> 2005-02-27
2/2005 -> 2005-02
Feb/2005 -> 2005-02
2005 -> 2005
'''
try:
# Allow blank input or None
if not form_str:
return u''
form_str = form_str.strip()
if not form_str:
return u''
# Parse form value
timedate_dict = cls.parse_timedate(form_str, 'form')
# Check range of dates and format as standard string
try:
db_datetime = cls.format(timedate_dict, 'db')
except DateConvertError, e:
msg = 'Date error reading in format \'%s\': %s' % (timedate_dict['readable_format'], ' '.join(e.args))
raise DateConvertError(msg)
return db_datetime
except DateConvertError, e:
if may_except:
raise e
else:
return form_str
@classmethod
def date_to_db(cls, date):
'''
datetime.date(2005, 2, 27) -> 2005-02-27
'''
assert isinstance(date, datetime.date)
date_str = date.strftime('%Y-%m-%d')
return date_str
@classmethod
def format(cls, datetime_dict, format_type):
'''Takes datetime_dict and formats them either for
the form or the database. If it encounters an out
of range value, it raises an exception.
'''
assert isinstance(datetime_dict, dict)
assert format_type in ('form', 'db')
# convert each field to a string
str_datetime_dict = {} # strings by field
for field in cls.datetime_fields:
if not datetime_dict.has_key(field):
break
val = datetime_dict[field]
min_, max_ = cls.datetime_fields[field][cls.datetime_fields_indexes['min']:cls.datetime_fields_indexes['max'] + 1]
if val < min_ or val > max_:
raise DateConvertError('%s value of "%s" is out of range.' % (field.capitalize(), val))
if format_type == 'form':
int_format_string = '%d'
elif format_type == 'db':
num_digits = cls.datetime_fields['hour'][cls.datetime_fields_indexes['digits']]
int_format_string = '%%0%sd' % num_digits
str_datetime_dict[field] = int_format_string % val
# assemble the date
date_fields = []
for field in cls.date_fields_order[format_type]:
if str_datetime_dict.has_key(field):
date_fields.append(str_datetime_dict[field])
formatted_datetime = unicode(cls.default_separators[format_type]['date'].join(date_fields))
# add in the time if specified
if str_datetime_dict.has_key('hour'):
if format_type == 'form':
datetime_format_string = '%(hour)s%(time_separator)s%(minute)s %(date)s'
elif format_type == 'db':
datetime_format_string = '%(date)s %(hour)s%(time_separator)s%(minute)s'
format_dict = str_datetime_dict.copy()
format_dict['date'] = formatted_datetime
format_dict['time_separator'] = cls.default_separators[format_type]['time']
formatted_datetime = datetime_format_string % format_dict
return formatted_datetime
@staticmethod
def form_validator(form_date_str, field=None):
try:
DateType.form_to_db(form_date_str)
except DateConvertError, e:
raise formalchemy.ValidationError(e)
@classmethod
def db_to_form(cls, db_str):
'2005-02-27 -> 27/2/2005 if correct format, otherwise, display as is.'
db_str = db_str.strip()
if not db_str:
return db_str
try:
timedate_dict = cls.parse_timedate(db_str, 'db')
except DateConvertError, e:
# cannot parse - simply display as-is
return db_str
try:
datetime_form = cls.format(timedate_dict, 'form')
except DateConvertError, e:
# values out of range - simply display as-is
return db_str
return datetime_form
@classmethod
def add_centurys_to_two_digit_year(cls, year, near_year=2010):
assert isinstance(year, int)
assert isinstance(near_year, int)
assert year < 1000, repr(year)
assert near_year > 1000 and near_year < 2200, repr(near_year)
year += 1000
while abs(year - near_year) > 50:
year += 100
return year
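# e.g. add_centurys_to_two_digit_year(99) -> 1999 and add_centurys_to_two_digit_year(5) -> 2005 (both nearest to 2010)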
| mit | -3,390,214,483,191,561,700 | 43.584229 | 139 | 0.51845 | false | 4.047836 | false | false | false |
fergalmoran/dss | spa/migrations/0056_auto__add_field_label_object_created__add_field_label_object_updated__.py | 1 | 38640 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
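# A minimal sketch of applying this migration (assuming a standard Django + South setup):
#   python manage.py migrate spa 0056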
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Label.object_created'
db.add_column(u'spa_label', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Label.object_updated'
db.add_column(u'spa_label', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Playlist.object_created'
db.add_column(u'spa_playlist', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Playlist.object_updated'
db.add_column(u'spa_playlist', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Mix.object_created'
db.add_column(u'spa_mix', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Mix.object_updated'
db.add_column(u'spa_mix', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Tracklist.object_created'
db.add_column(u'spa_tracklist', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Tracklist.object_updated'
db.add_column(u'spa_tracklist', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'ReleaseAudio.object_created'
db.add_column(u'spa_releaseaudio', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'ReleaseAudio.object_updated'
db.add_column(u'spa_releaseaudio', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Genre.object_created'
db.add_column(u'spa_genre', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Genre.object_updated'
db.add_column(u'spa_genre', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'UserProfile.object_created'
db.add_column(u'spa_userprofile', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'UserProfile.object_updated'
db.add_column(u'spa_userprofile', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Venue.object_created'
db.add_column(u'spa_venue', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Venue.object_updated'
db.add_column(u'spa_venue', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field '_Lookup.object_created'
db.add_column(u'spa__lookup', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field '_Lookup.object_updated'
db.add_column(u'spa__lookup', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Activity.object_created'
db.add_column(u'spa_activity', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Activity.object_updated'
db.add_column(u'spa_activity', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Release.object_created'
db.add_column(u'spa_release', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Release.object_updated'
db.add_column(u'spa_release', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'PurchaseLink.object_created'
db.add_column(u'spa_purchaselink', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'PurchaseLink.object_updated'
db.add_column(u'spa_purchaselink', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'ChatMessage.object_created'
db.add_column(u'spa_chatmessage', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'ChatMessage.object_updated'
db.add_column(u'spa_chatmessage', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Comment.object_created'
db.add_column(u'spa_comment', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Comment.object_updated'
db.add_column(u'spa_comment', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Notification.object_created'
db.add_column(u'spa_notification', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Notification.object_updated'
db.add_column(u'spa_notification', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Label.object_created'
db.delete_column(u'spa_label', 'object_created')
# Deleting field 'Label.object_updated'
db.delete_column(u'spa_label', 'object_updated')
# Deleting field 'Playlist.object_created'
db.delete_column(u'spa_playlist', 'object_created')
# Deleting field 'Playlist.object_updated'
db.delete_column(u'spa_playlist', 'object_updated')
# Deleting field 'Mix.object_created'
db.delete_column(u'spa_mix', 'object_created')
# Deleting field 'Mix.object_updated'
db.delete_column(u'spa_mix', 'object_updated')
# Deleting field 'Tracklist.object_created'
db.delete_column(u'spa_tracklist', 'object_created')
# Deleting field 'Tracklist.object_updated'
db.delete_column(u'spa_tracklist', 'object_updated')
# Deleting field 'ReleaseAudio.object_created'
db.delete_column(u'spa_releaseaudio', 'object_created')
# Deleting field 'ReleaseAudio.object_updated'
db.delete_column(u'spa_releaseaudio', 'object_updated')
# Deleting field 'Genre.object_created'
db.delete_column(u'spa_genre', 'object_created')
# Deleting field 'Genre.object_updated'
db.delete_column(u'spa_genre', 'object_updated')
# Deleting field 'UserProfile.object_created'
db.delete_column(u'spa_userprofile', 'object_created')
# Deleting field 'UserProfile.object_updated'
db.delete_column(u'spa_userprofile', 'object_updated')
# Deleting field 'Venue.object_created'
db.delete_column(u'spa_venue', 'object_created')
# Deleting field 'Venue.object_updated'
db.delete_column(u'spa_venue', 'object_updated')
# Deleting field '_Lookup.object_created'
db.delete_column(u'spa__lookup', 'object_created')
# Deleting field '_Lookup.object_updated'
db.delete_column(u'spa__lookup', 'object_updated')
# Deleting field 'Activity.object_created'
db.delete_column(u'spa_activity', 'object_created')
# Deleting field 'Activity.object_updated'
db.delete_column(u'spa_activity', 'object_updated')
# Deleting field 'Release.object_created'
db.delete_column(u'spa_release', 'object_created')
# Deleting field 'Release.object_updated'
db.delete_column(u'spa_release', 'object_updated')
# Deleting field 'PurchaseLink.object_created'
db.delete_column(u'spa_purchaselink', 'object_created')
# Deleting field 'PurchaseLink.object_updated'
db.delete_column(u'spa_purchaselink', 'object_updated')
# Deleting field 'ChatMessage.object_created'
db.delete_column(u'spa_chatmessage', 'object_created')
# Deleting field 'ChatMessage.object_updated'
db.delete_column(u'spa_chatmessage', 'object_updated')
# Deleting field 'Comment.object_created'
db.delete_column(u'spa_comment', 'object_created')
# Deleting field 'Comment.object_updated'
db.delete_column(u'spa_comment', 'object_updated')
# Deleting field 'Notification.object_created'
db.delete_column(u'spa_notification', 'object_created')
# Deleting field 'Notification.object_updated'
db.delete_column(u'spa_notification', 'object_updated')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'schedule.calendar': {
'Meta': {'object_name': 'Calendar'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200'})
},
'schedule.event': {
'Meta': {'object_name': 'Event'},
'calendar': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['schedule.Calendar']", 'null': 'True', 'blank': 'True'}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {}),
'end_recurring_period': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['schedule.Rule']", 'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'schedule.rule': {
'Meta': {'object_name': 'Rule'},
'description': ('django.db.models.fields.TextField', [], {}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'params': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'spa._lookup': {
'Meta': {'object_name': '_Lookup'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'spa.activity': {
'Meta': {'object_name': 'Activity'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']", 'null': 'True', 'blank': 'True'})
},
'spa.activitycomment': {
'Meta': {'object_name': 'ActivityComment', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_comments'", 'to': "orm['spa.Mix']"})
},
'spa.activitydownload': {
'Meta': {'object_name': 'ActivityDownload', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_downloads'", 'to': "orm['spa.Mix']"})
},
'spa.activityfavourite': {
'Meta': {'object_name': 'ActivityFavourite', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_favourites'", 'to': "orm['spa.Mix']"})
},
'spa.activityfollow': {
'Meta': {'object_name': 'ActivityFollow', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_follow'", 'to': "orm['spa.UserProfile']"})
},
'spa.activitylike': {
'Meta': {'object_name': 'ActivityLike', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_likes'", 'to': "orm['spa.Mix']"})
},
'spa.activityplay': {
'Meta': {'object_name': 'ActivityPlay', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_plays'", 'to': "orm['spa.Mix']"})
},
'spa.chatmessage': {
'Meta': {'object_name': 'ChatMessage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'chat_messages'", 'null': 'True', 'to': "orm['spa.UserProfile']"})
},
'spa.comment': {
'Meta': {'object_name': 'Comment'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['spa.Mix']"}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'time_index': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'spa.genre': {
'Meta': {'object_name': 'Genre'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'spa.label': {
'Meta': {'object_name': 'Label'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'spa.mix': {
'Meta': {'object_name': 'Mix'},
'description': ('django.db.models.fields.TextField', [], {}),
'download_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'duration': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'favourites': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'favourites'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['spa.UserProfile']"}),
'filetype': ('django.db.models.fields.CharField', [], {'default': "'mp3'", 'max_length': '10'}),
'genres': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['spa.Genre']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'likes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['spa.UserProfile']"}),
'mix_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '1024', 'blank': 'True'}),
'mp3tags_updated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'uid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '38', 'blank': 'True'}),
'upload_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mixes'", 'to': "orm['spa.UserProfile']"}),
'waveform_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'spa.notification': {
'Meta': {'object_name': 'Notification'},
'accepted_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'notifications'", 'null': 'True', 'to': "orm['spa.UserProfile']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_html': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'notification_text': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'notification_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_notications'", 'to': "orm['spa.UserProfile']"}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
},
'spa.playlist': {
'Meta': {'object_name': 'Playlist'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mixes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['spa.Mix']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'playlists'", 'to': "orm['spa.UserProfile']"})
},
'spa.purchaselink': {
'Meta': {'object_name': 'PurchaseLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'track': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_link'", 'to': "orm['spa.Tracklist']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'spa.recurrence': {
'Meta': {'object_name': 'Recurrence', '_ormbases': ['spa._Lookup']},
u'_lookup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Lookup']", 'unique': 'True', 'primary_key': 'True'})
},
'spa.release': {
'Meta': {'object_name': 'Release'},
'embed_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'release_artist': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'release_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)'}),
'release_description': ('django.db.models.fields.TextField', [], {}),
'release_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'release_label': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Label']"}),
'release_title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']"})
},
'spa.releaseaudio': {
'Meta': {'object_name': 'ReleaseAudio'},
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_audio'", 'null': 'True', 'to': "orm['spa.Release']"})
},
'spa.show': {
'Meta': {'object_name': 'Show', '_ormbases': ['schedule.Event']},
u'event_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['schedule.Event']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'show'", 'to': "orm['spa.Mix']"})
},
'spa.tracklist': {
'Meta': {'object_name': 'Tracklist'},
'artist': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.SmallIntegerField', [], {}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tracklist'", 'to': "orm['spa.Mix']"}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'remixer': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'timeindex': ('django.db.models.fields.TimeField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'spa.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'activity_sharing': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'activity_sharing_networks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'avatar_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '1024', 'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'social'", 'max_length': '15'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'following': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'followers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['spa.UserProfile']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_known_session': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': 'None', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
'spa.venue': {
'Meta': {'object_name': 'Venue'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'venue_address': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'venue_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'venue_name': ('django.db.models.fields.CharField', [], {'max_length': '250'})
}
}
complete_apps = ['spa']
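# A South migration like this is applied with the migrate command, e.g.
# (hypothetical invocation; the migration number is omitted):
#     python manage.py migrate spa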
|
bsd-2-clause
| -8,021,846,938,036,338,000 | 71.361423 | 205 | 0.571429 | false | 3.608517 | false | false | false |
denys-duchier/kivy
|
kivy/uix/label.py
|
1
|
25250
|
'''Label
=====
The :class:`Label` widget is for rendering text. It supports ASCII and Unicode
strings::
# hello world text
l = Label(text='Hello world')
# unicode text; can only display glyphs that are available in the font
l = Label(text=u'Hello world ' + unichr(2764))
# multiline text
l = Label(text='Multi\\nLine')
# size
l = Label(text='Hello world', font_size='20sp')
Text alignment and wrapping
---------------------------
The :class:`Label` has :attr:`halign` and :attr:`valign` properties to
control the alignment of its text, but by default these have no effect
and the text is always centered within the Label. This is for
efficiency; the text is aligned only within the pixel drawing of the
characters, which should normally be as small as possible to minimise
the number of pixels pushed to the GPU. By default, this text image is
only just large enough to contain the characters and is positioned in the
center of the Label.
In order for the alignment properties to take effect, the simplest
solution is to set the :attr:`text_size`, which specifies the size of
the bounding box within which text is aligned. For instance, the
following code binds this size to the size of the Label, so text will
be aligned within the widget bounds. This will also automatically wrap
the text of the Label to remain within this area.
.. code-block:: python
# in Python
from kivy.uix.label import Label
class MyLabel(Label):
pass
# in kv
<MyLabel>:
text_size: self.size
halign: 'right'
valign: 'middle'
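A minimal pure-Python sketch of the same binding (the property names are
real; the lambda-based binding below is just one possible approach):
.. code-block:: python
    from kivy.uix.label import Label
    label = Label(text='Some long text', halign='right', valign='middle')
    # Keep the text bounding box in sync with the widget size so that
    # halign/valign take effect and the text wraps inside the widget.
    label.bind(size=lambda inst, size: setattr(inst, 'text_size', size))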
Markup text
-----------
.. versionadded:: 1.1.0
You can change the style of the text using :doc:`api-kivy.core.text.markup`.
The syntax is similar to the bbcode syntax but only the inline styling is
allowed::
# hello world with world in bold
l = Label(text='Hello [b]World[/b]', markup=True)
# hello in red, world in blue
l = Label(text='[color=ff3333]Hello[/color][color=3333ff]World[/color]',
markup = True)
If you need to escape the markup from the current text, use
:func:`kivy.utils.escape_markup`::
text = 'This is an important message [1]'
l = Label(text='[b]' + escape_markup(text) + '[/b]', markup=True)
The following tags are available:
``[b][/b]``
Activate bold text
``[i][/i]``
Activate italic text
``[font=<str>][/font]``
Change the font
``[size=<integer>][/size]``
Change the font size
``[color=#<color>][/color]``
Change the text color
``[ref=<str>][/ref]``
Add an interactive zone. The reference + bounding box inside the
reference will be available in :attr:`Label.refs`
``[anchor=<str>]``
Put an anchor in the text. You can get the position of your anchor within
the text with :attr:`Label.anchors`
``[sub][/sub]``
Display the text at a subscript position relative to the text before it.
``[sup][/sup]``
Display the text at a superscript position relative to the text before it.
If you want to render the markup text with a [ or ] or & character, you need to
escape them. We created a simple syntax::
[ -> &bl;
] -> &br;
    & -> &amp;
Then you can write::
"[size=24]Hello &bl;World&bt;[/size]"
Interactive Zone in Text
------------------------
.. versionadded:: 1.1.0
You can now have definable "links" using text markup. The idea is to be able
to detect when the user clicks on part of the text and to react.
The tag ``[ref=xxx]`` is used for that.
In this example, we are creating a reference on the word "World". When
this word is clicked, the function ``print_it`` will be called with the
name of the reference::
def print_it(instance, value):
print('User clicked on', value)
widget = Label(text='Hello [ref=world]World[/ref]', markup=True)
widget.bind(on_ref_press=print_it)
For prettier rendering, you could add a color for the reference. Replace the
``text=`` in the previous example with::
'Hello [ref=world][color=0000ff]World[/color][/ref]'
Usage example
-------------
The following example marks the anchors and references contained in a label::
from kivy.app import App
from kivy.uix.label import Label
from kivy.clock import Clock
from kivy.graphics import Color, Rectangle
class TestApp(App):
@staticmethod
def get_x(label, ref_x):
""" Return the x value of the ref/anchor relative to the canvas """
return label.center_x - label.texture_size[0] * 0.5 + ref_x
@staticmethod
def get_y(label, ref_y):
""" Return the y value of the ref/anchor relative to the canvas """
# Note the inversion of direction, as y values start at the top of
# the texture and increase downwards
return label.center_y + label.texture_size[1] * 0.5 - ref_y
def show_marks(self, label):
# Indicate the position of the anchors with a red top marker
for name, anc in label.anchors.items():
with label.canvas:
Color(1, 0, 0)
Rectangle(pos=(self.get_x(label, anc[0]),
self.get_y(label, anc[1])),
size=(3, 3))
# Draw a green surround around the refs. Note the sizes y inversion
for name, boxes in label.refs.items():
for box in boxes:
with label.canvas:
Color(0, 1, 0, 0.25)
Rectangle(pos=(self.get_x(label, box[0]),
self.get_y(label, box[1])),
size=(box[2] - box[0],
box[1] - box[3]))
def build(self):
label = Label(
text='[anchor=a]a\\nChars [anchor=b]b\\n[ref=myref]ref[/ref]',
markup=True)
Clock.schedule_once(lambda dt: self.show_marks(label), 1)
return label
TestApp().run()
'''
__all__ = ('Label', )
from functools import partial
from kivy.clock import Clock
from kivy.uix.widget import Widget
from kivy.core.text import Label as CoreLabel
from kivy.core.text.markup import MarkupLabel as CoreMarkupLabel
from kivy.properties import StringProperty, OptionProperty, \
NumericProperty, BooleanProperty, ReferenceListProperty, \
ListProperty, ObjectProperty, DictProperty
from kivy.utils import get_hex_from_color
class Label(Widget):
'''Label class, see module documentation for more information.
:Events:
`on_ref_press`
Fired when the user clicks on a word referenced with a
``[ref]`` tag in a text markup.
'''
__events__ = ['on_ref_press']
_font_properties = ('text', 'font_size', 'font_name', 'bold', 'italic',
'halign', 'valign', 'padding_x', 'padding_y',
'text_size', 'shorten', 'mipmap', 'markup',
'line_height', 'max_lines', 'strip', 'shorten_from',
'split_str', 'unicode_errors')
def __init__(self, **kwargs):
self._trigger_texture = Clock.create_trigger(self.texture_update, -1)
self._trigger_markup_color = partial(self._trigger_texture_update, 'color')
super(Label, self).__init__(**kwargs)
        # bind all the properties for recreating the texture
d = Label._font_properties
fbind = self.fast_bind
update = self._trigger_texture_update
for x in d:
fbind(x, update, x)
self._label = None
self._create_label()
# force the texture creation
self._trigger_texture()
def on_markup(self, inst, markup):
if markup:
self.fast_bind('color', self._trigger_markup_color)
else:
self.fast_unbind('color', self._trigger_markup_color)
def _create_label(self):
# create the core label class according to markup value
if self._label is not None:
cls = self._label.__class__
else:
cls = None
markup = self.markup
if (markup and cls is not CoreMarkupLabel) or \
(not markup and cls is not CoreLabel):
            # markup has changed, we need to change our rendering method.
d = Label._font_properties
dkw = dict(list(zip(d, [getattr(self, x) for x in d])))
if markup:
self._label = CoreMarkupLabel(**dkw)
else:
self._label = CoreLabel(**dkw)
def _trigger_texture_update(self, name=None, source=None, value=None):
        # check if the label core class needs to be switched to a new one
if name == 'markup':
self._create_label()
if source:
if name == 'text':
self._label.text = value
elif name == 'text_size':
self._label.usersize = value
elif name == 'font_size':
self._label.options[name] = value
else:
self._label.options[name] = value
self._trigger_texture()
def texture_update(self, *largs):
'''Force texture recreation with the current Label properties.
After this function call, the :attr:`texture` and :attr:`texture_size`
will be updated in this order.
'''
mrkup = self._label.__class__ is CoreMarkupLabel
self.texture = None
if (not self._label.text or (self.halign[-1] == 'y' or self.strip) and
not self._label.text.strip()):
self.texture_size = (0, 0)
if mrkup:
self.refs, self._label._refs = {}, {}
self.anchors, self._label._anchors = {}, {}
else:
if mrkup:
text = self.text
# we must strip here, otherwise, if the last line is empty,
# markup will retain the last empty line since it only strips
# line by line within markup
if self.halign[-1] == 'y' or self.strip:
text = text.strip()
self._label.text = ''.join(('[color=',
get_hex_from_color(self.color),
']', text, '[/color]'))
self._label.refresh()
# force the rendering to get the references
if self._label.texture:
self._label.texture.bind()
self.refs = self._label.refs
self.anchors = self._label.anchors
else:
self._label.refresh()
texture = self._label.texture
if texture is not None:
self.texture = self._label.texture
self.texture_size = list(self.texture.size)
def on_touch_down(self, touch):
if super(Label, self).on_touch_down(touch):
return True
if not len(self.refs):
return False
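        # Translate the touch into texture coordinates: the texture is
        # centered in the widget, and the ref boxes use a y axis that grows
        # downwards from the top of the texture, hence the flip below.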
tx, ty = touch.pos
tx -= self.center_x - self.texture_size[0] / 2.
ty -= self.center_y - self.texture_size[1] / 2.
ty = self.texture_size[1] - ty
for uid, zones in self.refs.items():
for zone in zones:
x, y, w, h = zone
if x <= tx <= w and y <= ty <= h:
self.dispatch('on_ref_press', uid)
return True
return False
def on_ref_press(self, ref):
pass
#
# Properties
#
disabled_color = ListProperty([1, 1, 1, .3])
'''Text color, in the format (r, g, b, a)
.. versionadded:: 1.8.0
:attr:`disabled_color` is a :class:`~kivy.properties.ListProperty` and
    defaults to [1, 1, 1, .3].
'''
text = StringProperty('')
'''Text of the label.
Creation of a simple hello world::
widget = Label(text='Hello world')
    If you want to create the widget with a unicode string, use::
widget = Label(text=u'My unicode string')
:attr:`text` is a :class:`~kivy.properties.StringProperty` and defaults to
''.
'''
text_size = ListProperty([None, None])
'''By default, the label is not constrained to any bounding box.
You can set the size constraint of the label with this property.
    The text will autoflow into the constraints. So although the font size
    will not be reduced, the text will be arranged to fit into the box as well
    as possible, with any text still outside the box clipped.
This sets and clips :attr:`texture_size` to text_size if not None.
.. versionadded:: 1.0.4
For example, whatever your current widget size is, if you want the label to
be created in a box with width=200 and unlimited height::
Label(text='Very big big line', text_size=(200, None))
.. note::
This text_size property is the same as the
:attr:`~kivy.core.text.Label.usersize` property in the
:class:`~kivy.core.text.Label` class. (It is named size= in the
constructor.)
:attr:`text_size` is a :class:`~kivy.properties.ListProperty` and
defaults to (None, None), meaning no size restriction by default.
'''
font_name = StringProperty('DroidSans')
'''Filename of the font to use. The path can be absolute or relative.
Relative paths are resolved by the :func:`~kivy.resources.resource_find`
function.
.. warning::
        Depending on your text provider, the font file can be ignored. However,
you can mostly use this without problems.
If the font used lacks the glyphs for the particular language/symbols
you are using, you will see '[]' blank box characters instead of the
actual glyphs. The solution is to use a font that has the glyphs you
need to display. For example, to display |unicodechar|, use a font such
as freesans.ttf that has the glyph.
.. |unicodechar| image:: images/unicode-char.png
:attr:`font_name` is a :class:`~kivy.properties.StringProperty` and
defaults to 'DroidSans'.
'''
font_size = NumericProperty('15sp')
'''Font size of the text, in pixels.
:attr:`font_size` is a :class:`~kivy.properties.NumericProperty` and
defaults to 15sp.
'''
line_height = NumericProperty(1.0)
'''Line Height for the text. e.g. line_height = 2 will cause the spacing
between lines to be twice the size.
:attr:`line_height` is a :class:`~kivy.properties.NumericProperty` and
defaults to 1.0.
.. versionadded:: 1.5.0
'''
bold = BooleanProperty(False)
'''Indicates use of the bold version of your font.
.. note::
        Depending on your font, the bold attribute may have no impact on your
text rendering.
:attr:`bold` is a :class:`~kivy.properties.BooleanProperty` and defaults to
False.
'''
italic = BooleanProperty(False)
'''Indicates use of the italic version of your font.
.. note::
        Depending on your font, the italic attribute may have no impact on your
text rendering.
:attr:`italic` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
padding_x = NumericProperty(0)
'''Horizontal padding of the text inside the widget box.
:attr:`padding_x` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
.. versionchanged:: 1.9.0
`padding_x` has been fixed to work as expected.
In the past, the text was padded by the negative of its values.
'''
padding_y = NumericProperty(0)
'''Vertical padding of the text inside the widget box.
:attr:`padding_y` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
.. versionchanged:: 1.9.0
`padding_y` has been fixed to work as expected.
In the past, the text was padded by the negative of its values.
'''
padding = ReferenceListProperty(padding_x, padding_y)
'''Padding of the text in the format (padding_x, padding_y)
:attr:`padding` is a :class:`~kivy.properties.ReferenceListProperty` of
(:attr:`padding_x`, :attr:`padding_y`) properties.
'''
halign = OptionProperty('left', options=['left', 'center', 'right',
'justify'])
'''Horizontal alignment of the text.
:attr:`halign` is an :class:`~kivy.properties.OptionProperty` and
defaults to 'left'. Available options are : left, center, right and
justify.
.. warning::
This doesn't change the position of the text texture of the Label
(centered), only the position of the text in this texture. You probably
want to bind the size of the Label to the :attr:`texture_size` or set a
:attr:`text_size`.
.. versionchanged:: 1.6.0
A new option was added to :attr:`halign`, namely `justify`.
'''
valign = OptionProperty('bottom', options=['bottom', 'middle', 'top'])
'''Vertical alignment of the text.
:attr:`valign` is an :class:`~kivy.properties.OptionProperty` and defaults
to 'bottom'. Available options are : bottom, middle and top.
.. warning::
This doesn't change the position of the text texture of the Label
(centered), only the position of the text within this texture. You
probably want to bind the size of the Label to the :attr:`texture_size`
or set a :attr:`text_size` to change this behavior.
'''
color = ListProperty([1, 1, 1, 1])
'''Text color, in the format (r, g, b, a)
:attr:`color` is a :class:`~kivy.properties.ListProperty` and defaults to
[1, 1, 1, 1].
'''
texture = ObjectProperty(None, allownone=True)
'''Texture object of the text.
The text is rendered automatically when a property changes. The OpenGL
texture created in this operation is stored in this property. You can use
this :attr:`texture` for any graphics elements.
Depending on the texture creation, the value will be a
:class:`~kivy.graphics.texture.Texture` or
:class:`~kivy.graphics.texture.TextureRegion` object.
.. warning::
The :attr:`texture` update is scheduled for the next frame. If you need
the texture immediately after changing a property, you have to call
the :meth:`texture_update` method before accessing :attr:`texture`::
l = Label(text='Hello world')
# l.texture is good
l.font_size = '50sp'
# l.texture is not updated yet
l.texture_update()
# l.texture is good now.
:attr:`texture` is an :class:`~kivy.properties.ObjectProperty` and defaults
to None.
'''
texture_size = ListProperty([0, 0])
'''Texture size of the text. The size is determined by the font size and
text. If :attr:`text_size` is [None, None], the texture will be the size
required to fit the text, otherwise it's clipped to fit :attr:`text_size`.
When :attr:`text_size` is [None, None], one can bind to texture_size
and rescale it proportionally to fit the size of the label in order to
make the text fit maximally in the label.
.. warning::
The :attr:`texture_size` is set after the :attr:`texture`
property. If you listen for changes to :attr:`texture`,
:attr:`texture_size` will not be up-to-date in your callback.
Bind to :attr:`texture_size` instead.
'''
mipmap = BooleanProperty(False)
'''Indicates whether OpenGL mipmapping is applied to the texture or not.
Read :ref:`mipmap` for more information.
.. versionadded:: 1.0.7
:attr:`mipmap` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
shorten = BooleanProperty(False)
'''
Indicates whether the label should attempt to shorten its textual contents
as much as possible if a :attr:`text_size` is given. Setting this to True
without an appropriately set :attr:`text_size` will lead to unexpected
results.
:attr:`shorten_from` and :attr:`split_str` control the direction from
which the :attr:`text` is split, as well as where in the :attr:`text` we
are allowed to split.
:attr:`shorten` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
shorten_from = OptionProperty('center', options=['left', 'center',
'right'])
    '''The side from which the text should be shortened: can be left,
    right, or center.
    For example, if left, the ellipsis will appear towards the left side and we
    will display as much text starting from the right as possible. Similar to
    :attr:`shorten`, this option only applies when :attr:`text_size`[0] is
    not None. In this case, the string is shortened to fit within the specified
width.
.. versionadded:: 1.9.0
:attr:`shorten_from` is a :class:`~kivy.properties.OptionProperty` and
defaults to `center`.
'''
split_str = StringProperty('')
'''The string used to split the :attr:`text` while shortening the string
when :attr:`shorten` is True.
    For example, if it's a space, the string will be broken into words and as
    many whole words as can fit into a single line will be displayed. If
    :attr:`split_str` is the empty string, `''`, we split on every character
fitting as much text as possible into the line.
.. versionadded:: 1.9.0
:attr:`split_str` is a :class:`~kivy.properties.StringProperty` and
defaults to `''` (the empty string).
'''
unicode_errors = OptionProperty(
'replace', options=('strict', 'replace', 'ignore'))
'''How to handle unicode decode errors. Can be `'strict'`, `'replace'` or
`'ignore'`.
.. versionadded:: 1.9.0
:attr:`unicode_errors` is an :class:`~kivy.properties.OptionProperty` and
defaults to `'replace'`.
'''
markup = BooleanProperty(False)
'''
.. versionadded:: 1.1.0
If True, the text will be rendered using the
:class:`~kivy.core.text.markup.MarkupLabel`: you can change the
style of the text using tags. Check the
:doc:`api-kivy.core.text.markup` documentation for more information.
:attr:`markup` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
refs = DictProperty({})
'''
.. versionadded:: 1.1.0
List of ``[ref=xxx]`` markup items in the text with the bounding box of
all the words contained in a ref, available only after rendering.
For example, if you wrote::
Check out my [ref=hello]link[/ref]
The refs will be set with::
{'hello': ((64, 0, 78, 16), )}
The references marked "hello" have a bounding box at (x1, y1, x2, y2).
These co-ordinates are relative to the top left corner of the text, with
the y value increasing downwards. You can define multiple refs with the same
    name: each occurrence will be added as another (x1, y1, x2, y2) tuple to
this list.
The current Label implementation uses these references if they exist in
your markup text, automatically doing the collision with the touch and
dispatching an `on_ref_press` event.
You can bind a ref event like this::
def print_it(instance, value):
            print('User clicked on', value)
        widget = Label(text='Hello [ref=world]World[/ref]', markup=True)
        widget.bind(on_ref_press=print_it)
.. note::
This works only with markup text. You need :attr:`markup` set to
True.
'''
anchors = DictProperty({})
'''
.. versionadded:: 1.1.0
Position of all the ``[anchor=xxx]`` markup in the text.
These co-ordinates are relative to the top left corner of the text, with
    the y value increasing downwards. Anchor names should be unique and only
    the first occurrence of any duplicate anchor will be recorded.
You can place anchors in your markup text as follows::
text = """
[anchor=title1][size=24]This is my Big title.[/size]
[anchor=content]Hello world
"""
Then, all the ``[anchor=]`` references will be removed and you'll get all
the anchor positions in this property (only after rendering)::
>>> widget = Label(text=text, markup=True)
>>> widget.texture_update()
>>> widget.anchors
{"content": (20, 32), "title1": (20, 16)}
.. note::
This works only with markup text. You need :attr:`markup` set to
True.
'''
max_lines = NumericProperty(0)
'''Maximum number of lines to use, defaults to 0, which means unlimited.
    Please note that :attr:`shorten` takes precedence over this property (with
    shorten, the text is always a single line).
.. versionadded:: 1.8.0
:attr:`max_lines` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
'''
strip = BooleanProperty(False)
'''Whether leading and trailing spaces and newlines should be stripped from
each displayed line. If True, every line will start at the right or left
edge, depending on :attr:`halign`. If :attr:`halign` is `justify` it is
implicitly True.
.. versionadded:: 1.9.0
:attr:`strip` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
|
mit
| -6,323,048,173,125,887,000 | 33.400545 | 83 | 0.617465 | false | 4.04453 | false | false | false |
Weasyl/weasyl
|
weasyl/shout.py
|
1
|
4702
|
import arrow
from libweasyl import staff
from weasyl import define as d
from weasyl import frienduser
from weasyl import ignoreuser
from weasyl import macro as m
from weasyl import media
from weasyl import welcome
from weasyl.comment import thread
from weasyl.error import WeasylError
def select(userid, ownerid, limit=None, staffnotes=False):
statement = ["""
SELECT
sh.commentid, sh.parentid, sh.userid, pr.username,
sh.content, sh.unixtime, sh.settings, sh.hidden_by
FROM comments sh
INNER JOIN profile pr USING (userid)
WHERE sh.target_user = %i
AND sh.settings %s~ 's'
""" % (ownerid, "" if staffnotes else "!")]
# moderators get to view hidden comments
if userid not in staff.MODS:
statement.append(" AND sh.settings !~ 'h'")
if userid:
statement.append(m.MACRO_IGNOREUSER % (userid, "sh"))
statement.append(" ORDER BY sh.commentid")
query = d.execute("".join(statement))
result = thread(query, reverse_top_level=True)
if limit:
result = result[:limit]
media.populate_with_user_media(result)
return result
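# A minimal usage sketch (the ids are hypothetical):
#
#     shouts = select(userid=1010, ownerid=2020, limit=10)
#
# The result is the comment thread built by thread(), with the newest
# top-level shouts first and user media already attached.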
def count(ownerid, staffnotes=False):
db = d.connect()
sh = d.meta.tables['comments']
op = '~' if staffnotes else '!~'
q = (
d.sa.select([d.sa.func.count()])
.select_from(sh)
.where(sh.c.settings.op(op)('s'))
.where(sh.c.target_user == ownerid))
(ret,), = db.execute(q)
return ret
def insert(userid, target_user, parentid, content, staffnotes):
# Check invalid content
if not content:
raise WeasylError("commentInvalid")
elif not target_user or (not d.is_vouched_for(target_user) and not staffnotes):
raise WeasylError("Unexpected")
# Determine parent userid
if parentid:
parentuserid = d.engine.scalar(
"SELECT userid FROM comments WHERE commentid = %(parent)s",
parent=parentid,
)
if parentuserid is None:
raise WeasylError("shoutRecordMissing")
else:
parentuserid = None
# Check permissions
if userid not in staff.MODS:
if ignoreuser.check(target_user, userid):
raise WeasylError("pageOwnerIgnoredYou")
elif ignoreuser.check(userid, target_user):
raise WeasylError("youIgnoredPageOwner")
elif ignoreuser.check(parentuserid, userid):
raise WeasylError("replyRecipientIgnoredYou")
elif ignoreuser.check(userid, parentuserid):
raise WeasylError("youIgnoredReplyRecipient")
is_banned, _ = d.get_login_settings(target_user)
profile_config = d.get_config(target_user)
if is_banned or "w" in profile_config or "x" in profile_config and not frienduser.check(userid, target_user):
raise WeasylError("insufficientActionPermissions")
# Create comment
settings = 's' if staffnotes else ''
co = d.meta.tables['comments']
db = d.connect()
commentid = db.scalar(
co.insert()
.values(userid=userid, target_user=target_user, parentid=parentid or None, content=content,
unixtime=arrow.utcnow(), settings=settings)
.returning(co.c.commentid))
# Create notification
if parentid and userid != parentuserid:
if not staffnotes or parentuserid in staff.MODS:
welcome.shoutreply_insert(userid, commentid, parentuserid, parentid, staffnotes)
elif not staffnotes and target_user and userid != target_user:
welcome.shout_insert(userid, commentid, otherid=target_user)
d.metric('increment', 'shouts')
return commentid
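# A minimal usage sketch (ids and text are hypothetical):
#
#     commentid = insert(userid=1010, target_user=2020, parentid=None,
#                        content='Nice set!', staffnotes=False)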
def remove(userid, commentid=None):
query = d.engine.execute(
"SELECT userid, target_user, settings FROM comments WHERE commentid = %(id)s AND settings !~ 'h'",
id=commentid,
).first()
if not query or ('s' in query[2] and userid not in staff.MODS):
raise WeasylError("shoutRecordMissing")
if userid != query[1] and userid not in staff.MODS:
if userid != query[0]:
raise WeasylError("InsufficientPermissions")
# user is commenter
replies = d.execute(
"SELECT commentid FROM comments WHERE parentid = %d", [commentid])
if replies:
# a commenter cannot remove their comment if it has replies
raise WeasylError("InsufficientPermissions")
# remove notifications
welcome.comment_remove(commentid, 'shout')
d._page_header_info.invalidate(userid)
# hide comment
d.execute("UPDATE comments SET settings = settings || 'h', hidden_by = %i WHERE commentid = %i", [userid, commentid])
return query[1]
|
apache-2.0
| -7,151,868,114,302,378,000 | 32.112676 | 121 | 0.64866 | false | 3.746614 | false | false | false |
alsmirn/adist
|
extinction/tabular.py
|
1
|
3610
|
"""
Copyright (c) 2009, Alexey Smirnov
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Saint-Petersburg State University nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY ALEXEY SMIRNOV ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ALEXEY SMIRNOV BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
STD_V = {}
STD_BV = {}
# Little bit modernized table of values from:
# http://vizier.cfa.harvard.edu/viz-bin/Cat?J/PAZh/34/21#sRM2.1
__file_with_standards = os.path.join(os.path.dirname(__file__),
"standards.dat")
try:
file = open(__file_with_standards, 'r')
except IOError:
import sys
print "File %s does not exists." % __file_with_standards
print __file_with_standards
sys.exit(True)
for row in [line.split()[1:] for line in file]:
do_key = lambda lumin: "%s%d" % (row[0], lumin)
for lumin in (1, 3, 5):
key = do_key(lumin)
STD_V[key], STD_BV[key] = map(float, (row[lumin], row[lumin+1]))
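# At this point STD_V and STD_BV map keys such as 'G25' (temperature
# class + subclass + luminosity class) to the standard V magnitude and
# B-V colour read from standards.dat.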
def av_tabular(t_class, s_class, l_class, b_v):
"""
@param t_class: Temperature class, from list 'OBAFGKM'.
@param s_class: Temperature subclass, from 0 to 9.
@param l_class: Luminosity class, like 1, 3 or 5.
@param b_v: B-V value.
@return a_v: full extinction value in visual band.
@author: Alexey Smirnov
    @note: computations are based on the results of the paper: "Inaccuracies in the
spectral classification of stars from the Tycho-2 Spectral Type Catalogue",
Tsvetkov, A. S.; Popov, A. V.; Smirnov, A. A.,
Astronomy Letters, Volume 34, Issue 1, pp.17-27
"""
t_class_basis = tuple('OBAFGKM')
s_class_basis = range(10)
l_class_basis = (1, 3, 5)
if t_class not in t_class_basis:
raise NameError("Temperature class %s is not in range %s" %
(t_class, t_class_basis))
if s_class not in s_class_basis:
raise NameError("Temperature subclass %s is not in range %s" %
(s_class, s_class_basis))
if l_class not in l_class_basis:
raise NameError("Luminosity class %s is not in range %s" %
(l_class, l_class_basis))
do_key = lambda *args: "%s%d%d" % args
key = do_key(t_class, s_class, l_class)
e_b_v = b_v - STD_BV[key]
r_const = 3.30 + 0.28 * STD_BV[key] + 0.4 * e_b_v
a_v = r_const * e_b_v
return a_v
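# A minimal usage sketch (the numbers are illustrative only):
#
#     a_v = av_tabular('G', 2, 5, 0.85)
#     print "A_V = %.3f" % a_v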
|
bsd-3-clause
| -2,222,819,617,115,563,300 | 40.022727 | 80 | 0.667313 | false | 3.438095 | false | false | false |
PedroMDuarte/thesis-hubbard-lda_evap
|
qmc.py
|
1
|
16230
|
"""
This file provides a way to obtain thermodynamic quantities from an
interpolation of available QMC solutions
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import rc
rc('font', **{'family':'serif'})
rc('text', usetex=True)
import glob
import os
import ldaconf
basedir = ldaconf.basedir
from scipy.spatial import Delaunay
from scipy.interpolate import CloughTocher2DInterpolator, LinearNDInterpolator
from scipy.interpolate.interpnd import _ndim_coords_from_arrays
import logging
# create logger
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
#logger.disabled = True
def get_qty_mu( dat, mu, MUCOL, COL, **kwargs ):
    # Control the interpolation between available
# density points here
#~qtyinterp = 'nearest'
qtyinterp = 'linear'
msg = kwargs.get('msg', None)
DENSCOL = 1
ENTRCOL = 2
SPICOL = 3
CMPRCOL = 4
if COL == SPICOL:
default_minus = 1.0
default_plus = 0.0
elif COL == ENTRCOL:
default_minus = 0.0
default_plus = 0.0
elif COL == DENSCOL:
default_minus = 0.0
default_plus = 2.0
elif COL == CMPRCOL:
default_minus = 0.0
default_plus = 0.0
else:
raise ValueError("Column not defined: COL = {:d}".format(COL) )
CAREFUL = kwargs.get('careful', True)
if CAREFUL and (mu < -10. or mu > 60.):
CAREFUL = False
if qtyinterp == 'nearest':
index = np.argmin( np.abs(dat[:, MUCOL] - mu ))
qtyresult = dat[index,COL]
else:
# find the two closest chemical potentials that
# stride the point
mudat = dat[:,MUCOL]
verbose = False
if np.all(mu < mudat):
qtyresult = default_minus
if COL == DENSCOL or COL == ENTRCOL:
if verbose:
print "QTY=", COL,
print "===>>> mu={:0.2f} ".format(mu), msg
if dat[:,DENSCOL].min() < 0.1 :
qtyresult = default_minus
elif CAREFUL:
return 'out-of-bounds'
#print "====>>> BE CAREFUL : Using default density" + \
# " n=%.2f"%default_minus + \
# " at mu={:0.2f} ".format(mu),
#if msg is not None:
# print msg
#raise ValueError('density error')
elif np.all( mu > mudat):
qtyresult = default_plus
if COL == DENSCOL or COL == ENTRCOL:
if verbose:
print "QTY=", COL,
print "====>>> mu={:0.2f} ".format(mu), msg
if dat[:,DENSCOL].max() > 1.9 :
qtyresult = default_plus
elif CAREFUL:
return 'out-of-bounds'
#print "====>>> BE CAREFUL : Using default density" + \
# " n=%.2f"%default_plus + \
# " at mu={:0.2f} ".format(mu),
#if msg is not None:
# print msg
#raise ValueError('density error')
else:
# since the mu's are ordered we can do:
index0 = np.where( mudat <=mu )[0][-1]
index1 = np.where( mudat > mu )[0][0]
qty0 = dat[ index0, COL ]
qty1 = dat[ index1, COL ]
mu0 = dat[ index0, MUCOL ]
mu1 = dat[ index1, MUCOL ]
qtyresult = qty0 + (mu-mu0) * (qty1-qty0) / (mu1-mu0)
return qtyresult
#print
#print " mu = ", mu
#print "index0 = ", index0
#print "index1 = ", index1
#print "Doing linear interpolation for the qty"
#print " mu0 = ", mu0
#print " mu1 = ", mu1
#print "qty0 = ", qty0
#print "qty1 = ", qty1
#print "qtyresult = ", qtyresult
def find_closest_qmc( U=8, T=0.67, mu=4.0, **kwargs):
"""
This function finds the closest values of U and T in the QMC data
that straddle the values U and T given as arguments.
"""
nUs = 4
nTs = 3
ALLPTS = kwargs.get('ALLPTS', False)
# select which quantity will be returned, options are
# spi and entropy
QTY = kwargs.get('QTY', 'spi' )
if QTY == 'spi':
datadir = basedir + 'COMB_Final_Spi/'
elif QTY == 'entropy':
datadir = basedir + 'COMB_Final_Entr/'
elif QTY == 'density':
datadir = basedir + 'COMB_Final_Spi/'
elif QTY == 'kappa':
datadir = basedir + 'COMB_Final_Spi/'
else:
raise ValueError('Quantity not defined:' + str(QTY) )
fname = datadir + 'U*'
us = [ float(u.split('/U')[-1]) for u in glob.glob(fname) ]
du = [ np.abs(U-u) for u in us ]
index = np.argsort(du)
if ALLPTS:
Ulist0 = range(len(index))
else:
Ulist0 = range( nUs )
us = [ us[index[i]] for i in Ulist0]
#print us
#print du
#print index
#print "Closest Us = ", us
datfiles = []
for u in us:
# For the Spi and Stheta data
if QTY == 'spi' or QTY == 'density' or QTY == 'kappa':
fname = datadir + 'U{U:02d}/T*dat'.format(U=int(u))
fs = sorted(glob.glob(fname))
Ts = [ float(f.split('T')[1].split('.dat')[0]) for f in fs ]
elif QTY=='entropy':
fname = datadir + 'U{U:02d}/S*dat'.format(U=int(u))
fs = sorted(glob.glob(fname))
Ts = [ float(f.split('S')[1].split('.dat')[0]) for f in fs ]
Ts_g = [] ; Ts_l = [];
for t in Ts:
if t > T:
Ts_g.append(t)
else:
Ts_l.append(t)
order_g = np.argsort( [ np.abs( T -t ) for t in Ts_g ] )
order_l = np.argsort( [ np.abs( T -t ) for t in Ts_l ] )
try:
Tpts = [ Ts_g[ order_g[0]] , Ts_l[ order_l[0]] ]
except:
#print
#print "problem adding U=",u, "T=",Ts
#print "available T data does not stride the point"
#print "T =", T
#print "Ts =", Ts
#print "will add nearest Ts nevertheless"
Tpts = [ ]
#raise ValueError("QMC data not available.")
dT = [ np.abs( T - t) for t in Ts ]
index = np.argsort(dT)
if ALLPTS:
Tlist0 = range(len(Ts))
else:
Tlist0 = range( min(nTs , len(Ts)))
for i in Tlist0:
Tnew = Ts[index[i]]
if Tnew not in Tpts:
Tpts.append(Tnew)
for Tpt in Tpts:
index = Ts.index( Tpt )
try:
datfiles.append( [ fs[ index ], u, Ts[index] ] )
except:
print "problem adding U=",u, "T=",Ts
raise
# Need to make sure that selected T values stride both
# sides of the point
#print
#print u
#print Ts
#print dT
#print index
#print fs
# for i in range(min(3, len(Ts))):
# try:
# datfiles.append( [ fs[index[i]], u, Ts[index[i]] ] )
# except:
# print "problem adding U=",u, "T=",Ts
# raise
#
#datfiles.append( [ fs[index[1]], u, Ts[index[1]] ] )
#print datfiles
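    # Column layout of the QMC .dat files:
    #   0 = chemical potential mu, 1 = density, 2 = entropy,
    #   3 = spin structure factor (Spi), 4 = compressibility (kappa)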
MUCOL = 0
DENSCOL = 1
ENTRCOL = 2
SPICOL = 3
CMPRCOL = 4
if QTY == 'spi':
COL = SPICOL
elif QTY == 'entropy':
COL = ENTRCOL
elif QTY == 'density':
COL = DENSCOL
elif QTY == 'kappa':
COL = CMPRCOL
msg0 = 'U={:0.2f}, T={:0.2f}'.format(U,T)
logger.debug("number of nearby points = " + str(len(datfiles)))
basedat = []
basedaterr = []
datserr = []
for mm, f in enumerate(datfiles):
# f[0] is the datafile name
# f[1] is U
# f[2] is T
radius = kwargs.get('radius', np.nan )
msg = 'U={:0.2f}, T={:0.2f}'.format(U,T) + \
' mu={:0.2f}, r={:0.2f}, Upt={:0.3f}, Tpt={:0.3f}'.\
format(mu, radius, f[1], f[2])
try:
dat = np.loadtxt(f[0])
spival = get_qty_mu( dat, mu, MUCOL, COL, msg=msg )
            # Toggle the False below to plot all of the out-of-bounds cases
if spival == 'out-of-bounds':
#spival_symmetry =
logger.info('qty is out of bounds')
basedaterr.append( [f[1], f[2], np.nan] )
datserr.append( dat )
if False:
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.plot( dat[:,MUCOL], dat[:,COL], '.-')
ax.axvline( mu )
ax.text( 0.5, 1.05, msg, ha='center', va='bottom', \
transform=ax.transAxes, fontsize=6.)
if matplotlib.get_backend() == 'agg':
fig.savefig('err_mu_%02d.png'%mm, dpi=200)
plt.close(fig)
else:
plt.show()
plt.close(fig)
continue
else:
basedat.append( [f[1], f[2], spival] )
        except Exception as e:
print "Failed to get data from file = ", f
# toggle plotting, not implemented yet:
if True:
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.plot( dat[:,MUCOL], dat[:,COL], '.-')
ax.axvline( mu )
ax.text( 0.5, 1.05, msg, ha='center', va='bottom', \
transform=ax.transAxes)
if matplotlib.get_backend() == 'agg':
fig.savefig('err_mu_%02d.png'%mm, dpi=200)
else:
plt.show()
raise e
logger.debug("number of nearby valid points = " + str(len(basedat)))
error = False
points = None
# MAKE THE TRIANGULATION
basedat = np.array(basedat)
Us = np.unique(basedat[:,0] )
Ts = np.unique(basedat[:,1] )
validTriang = not ( len(Us) ==1 or len(Ts) == 1 )
#print "#Us={:d}, #Ts={:d}".format( len(Us), len(Ts) )
#print msg
if validTriang:
points = _ndim_coords_from_arrays(( basedat[:,0] , basedat[:,1]))
#print "Closest dat = ", basedat
#finterp = CloughTocher2DInterpolator(points, basedat[:,2])
finterp = LinearNDInterpolator( points, basedat[:,2] )
else:
logerr = 'not enough finterp points, QTY=%s'%QTY + '\n' + msg + '\n' \
+ "number of basedat pts = " + str(len(basedat))
print basedat
print "len Us = ", len(Us)
print "len Ts = ", len(Ts)
print "len 'out-of-bounds' = ", len( basedaterr )
if len( basedaterr ) > 0:
for bb, bdaterr in enumerate(basedaterr):
msgbb = 'U={:0.2f}, T={:0.2f}'.format(U,T) +\
' mu={:0.2f}, r={:0.2f}, Upt={:0.3f}, Tpt={:0.3f}'.\
format(mu, radius, basedaterr[bb][0], basedaterr[bb][1] )
daterr = datserr[bb]
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.plot( daterr[:,MUCOL], daterr[:,COL], '.-')
ax.axvline( mu )
ax.text( 0.5, 1.05, msgbb, ha='center', va='bottom', \
transform=ax.transAxes, fontsize=6.)
if matplotlib.get_backend() == 'agg':
fig.savefig('err_mu_%02d.png'%bb, dpi=200)
plt.close(fig)
else:
plt.show()
plt.close(fig)
logger.exception(logerr)
raise ValueError('finterp')
    if points is None:
logger.warning( "points object is None" )
if error == False:
try:
result = finterp( U,T )
if np.isnan(result):
if U >= 30.0 and U <=32.5:
result = finterp( 29.99, T )
logger.warning(" qmc: U={:0.1f} replaced to U=29.99 ".\
format(U) )
if np.isnan(result):
raise Exception("\n!!!! qmc: Invalid result, QTY:%s!!!!\n"%QTY \
+ msg0)
except Exception as e:
if kwargs.get('error_nan', False):
return np.nan
else:
error = True
logger.exception("Invalid QTY result!")
if error == False:
        if result >= 8. and QTY == 'spi':
print " Obtained Spi > 8. : U={:0.2f}, T={:0.2f}, mu={:0.2f}".\
format( U, T, mu ),
print " ==> Spi={:0.2f}".format(float(result))
error = True
elif result >=4. and QTY == 'entropy':
print " Obtained Ent > 4. : U={:0.2f}, T={:0.2f}, mu={:0.2f}".\
format( U, T, mu ),
print " ==> Result={:0.2f}".format(float(result))
error = True
logger.debug("error status = " + str(error))
if error or kwargs.get('showinterp',False):
logger.debug("Inside error if statement...")
if kwargs.get('error_nan', False):
pass
#return np.nan
#print "Interp points:"
#print basedat
if len(basedat) == 0 and len(basedaterr) > 0 :
basedaterr = np.array(basedaterr)
Userr = np.unique(basedaterr[:,0] )
Tserr = np.unique(basedaterr[:,1] )
validTriangerr = not ( len(Userr) ==1 or len(Tserr) == 1 )
points = _ndim_coords_from_arrays(( basedaterr[:,0] , basedaterr[:,1]))
tri = Delaunay(points)
else:
tri = Delaunay(points)
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.triplot(points[:,0], points[:,1], tri.simplices.copy())
ax.plot(points[:,0], points[:,1], 'o')
ax.plot( U, T, 'o', ms=6., color='red')
xlim = ax.get_xlim()
dx = (xlim[1]-xlim[0])/10.
ax.set_xlim( xlim[0]-dx, xlim[1]+dx )
ylim = ax.get_ylim()
dy = (ylim[1]-ylim[0])/10.
ax.set_ylim( ylim[0]-dy, ylim[1]+dy )
ax.set_xlabel('$U/t$')
ax.set_ylabel('$T/t$',rotation=0,labelpad=8)
tt = kwargs.get('title_text','')
ax.set_title( tt + '$U/t={:.2f}$'.format(U) + \
',\ \ ' + '$T/t={:.2f}$'.format(T), \
ha='center', va='bottom', fontsize=10)
save_err = kwargs.get('save_err',None)
if save_err is not None:
print "Saving png."
fig.savefig( save_err, dpi=300)
if matplotlib.get_backend() == 'agg':
fig.savefig('err.png', dpi=200)
print "Saved error to err.png"
else:
plt.show()
if not kwargs.get('single', False):
raise ValueError("Could not interpolate using QMC data.")
if ALLPTS:
if 'savepath' in kwargs.keys():
fig.savefig( kwargs.get('savepath',None) , dpi=300)
if error:
            raise ValueError("Could not interpolate using QMC data.")
return result
|
mit
| -3,425,785,279,829,014,000 | 31.721774 | 83 | 0.463648 | false | 3.43129 | false | false | false |
charityscience/csh-sms
|
tests/jobs/test_text_reminder_job.py
|
1
|
2487
|
import mock
from mock import patch, call
from freezegun import freeze_time
from datetime import datetime
from django.test import TestCase
from tests.fixtures import contact_object
from modules.text_reminder import TextReminder
from jobs import text_reminder_job
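# Freezing "now" with freeze_time makes the date-of-birth based reminder
# windows deterministic.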
FAKE_NOW = datetime(2017, 7, 17, 0, 0)
class TextReminderJobTests(TestCase):
@freeze_time(FAKE_NOW)
@patch("logging.info")
@patch("modules.text_reminder.Texter.send")
def test_remind_two_people(self, mocked_send_text, mocked_logger):
c1 = contact_object(name="Roland",
phone_number="1-111-1111",
date_of_birth="12/6/2017") # 7 days before 6 week appointment
c2 = contact_object(name="Sai",
phone_number="1-112-1111",
date_of_birth="12/6/2017",
language="Hindi")
text_reminder_job.remind_all()
calls = [call(message=TextReminder(c1).get_reminder_msg(),
phone_number=c1.phone_number),
call(message=TextReminder(c2).get_reminder_msg(),
phone_number=c2.phone_number)]
mocked_send_text.assert_has_calls(calls, any_order=True)
self.assertEqual(mocked_send_text.call_count, 2)
@freeze_time(FAKE_NOW)
@patch("logging.info")
@patch("modules.text_reminder.Texter.send")
def test_remind_two_people_but_not_the_cancelled_one(self, mocked_send_text, mocked_logger):
c1 = contact_object(name="Roland",
phone_number="1-111-1111",
date_of_birth="12/6/2017") # 7 days before 6 week appointment
c2 = contact_object(name="Sai",
phone_number="1-112-1111",
date_of_birth="12/6/2017",
language="Hindi")
c3 = contact_object(name="Cancelled",
phone_number="1-111-1112",
date_of_birth="12/6/2017")
c3.cancelled = True
c3.save()
text_reminder_job.remind_all()
calls = [call(message=TextReminder(c1).get_reminder_msg(),
phone_number=c1.phone_number),
call(message=TextReminder(c2).get_reminder_msg(),
phone_number=c2.phone_number)]
mocked_send_text.assert_has_calls(calls, any_order=True)
self.assertEqual(mocked_send_text.call_count, 2)
|
gpl-3.0
| -2,241,891,620,633,596,700 | 44.218182 | 96 | 0.572577 | false | 3.689911 | true | false | false |
hajicj/safire
|
scripts/text_preprocessing_explorer.py
|
1
|
5761
|
#!/usr/bin/env python
import argparse
from copy import deepcopy
import itertools
import logging
import operator
import os
import random
import webbrowser
from safire.data.text_browser import TextBrowser
import safire.utils
from safire.data.image_browser import ImageBrowser
from safire.data.loaders import MultimodalDatasetLoader, IndexLoader, \
ModelLoader, MultimodalShardedDatasetLoader
__author__ = 'Jan Hajic jr.'
##############################################################################
def build_argument_parser():
parser = argparse.ArgumentParser(description=__doc__, add_help=True)
parser.add_argument('-r', '--root', action='store', default=None,
required=True, help='The path to'+
' the directory which is the root of a dataset.' +
' (Will be passed to a Loader as a root.)')
parser.add_argument('-n', '--name', help='The dataset name passed to the' +
' Loader. Has to correspond to the *.vtlist file name.')
parser.add_argument('-l', '--labels', nargs='+',
help='The corpus labels.')
parser.add_argument('--first_n_sentences', type=int, default=10,
help='Display only this many sentences from the '
'beginning of a text.')
parser.add_argument('-v', '--verbose', action='store_true', help='Turn on'+
' INFO logging messages.')
parser.add_argument('--debug', action='store_true', help='Turn on debug '+
'prints.')
return parser
def print_interactive_help():
"""Prints the help message for interactive mode."""
    print 'Text preprocessing explorer interactive mode help\n' \
          '=================================================\n' \
          '\n' \
          'Commands:\n' \
          '  h ... help\n' \
          '  c N ... compare representations for N-th document in vtlist\n' \
          '  q|e ... exit (will ask for confirmation)\n' \
          '\n' \
          'On the \'c\' command, will show the raw text followed by one\n' \
          'column of scored word representations per model.'
def run_interactive(vtlist, raw_corpus, raw_browser,
corpora, browsers, labels):
exit_commands = frozenset(['q', 'e'])
compare_commands = frozenset(['c'])
help_commands = frozenset(['h'])
# Starting settings
highest_scoring = 10
exit_interactive = False
while not exit_interactive:
# Parse command
user_input = raw_input('--> ')
split_input = user_input.split(' ', 1)
if len(split_input) > 1:
command, options = split_input
else:
command = split_input[0]
options = None
# Execute command
if command in help_commands:
print_interactive_help()
continue
elif command in compare_commands:
N = int(options)
text = raw_browser.get_text(N)
btext = text + '\n[end of text]\n'
#print btext
representations = []
for label, browser in zip(labels, browsers):
representation = browser.get_word_representation(N,
highest_scoring=highest_scoring)
# Add headers to representation
representation = [('model', label), ('-----', '-----')] \
+ representation
representations.append(representation)
all_representations = list(itertools.chain(*representations))
            # Lay the representations out side by side, one column per model.
formatted_repr = raw_browser.format_representation(
all_representations, n_cols=len(representations))
output = text + '\n\n' + formatted_repr
raw_browser.text_to_window(output)
elif command in exit_commands:
confirmation = raw_input('-[y/n]-> ')
if confirmation in exit_commands or confirmation == '' \
or confirmation == 'y':
exit_interactive = True
continue
else:
print 'Invalid command %s' % command
def main(args):
logging.info('Initializing loaders with root %s, name %s' % (
args.root, args.name))
dloader = MultimodalShardedDatasetLoader(args.root, args.name)
vtlist_file = dloader.layout.vtlist
with open(os.path.join(args.root, vtlist_file)) as vtlist_handle:
vtlist = [ l.strip() for l in vtlist_handle ]
# The corpus and browser used for displaying the raw texts
raw_text_corpus = dloader.load_text_corpus()
raw_text_browser = TextBrowser(args.root, raw_text_corpus,
first_n_sentences=args.first_n_sentences)
# The browsers from which we pull representations
text_corpora = [ dloader.load_text_corpus(label) for label in args.labels]
text_browsers = [ TextBrowser(args.root, corpus,
first_n_sentences=args.first_n_sentences)
for corpus in text_corpora ]
run_interactive(vtlist, raw_text_corpus, raw_text_browser,
text_corpora, text_browsers, args.labels)
# Explicit delete
del raw_text_browser
for browser in text_browsers:
del browser
if __name__ == '__main__':
parser = build_argument_parser()
args = parser.parse_args()
if args.debug:
logging.basicConfig(format='%(levelname)s : %(message)s',
level=logging.DEBUG)
elif args.verbose:
logging.basicConfig(format='%(levelname)s : %(message)s',
level=logging.INFO)
main(args)
|
gpl-3.0
| -4,264,359,828,596,876,000 | 34.349693 | 82 | 0.559799 | false | 4.407804 | false | false | false |
indexofire/cdc
|
cdc/contrib/cache/helpers.py
|
1
|
12245
|
# -*- coding: utf-8 -*-
import types
import hashlib
import logging
import cPickle as pickle
from django.conf import settings
from django.core.cache import cache
from django.utils.encoding import smart_str
from keyedcache.utils import is_string_like, is_list_or_tuple
log = logging.getLogger('cdc_cache')
# The debugging variable CACHED_KEYS is exact only with the Django
# debugging server (or any single worker process server) and without restarting
# the server between restarts of the main cache (memcached).
# Keys in the CACHED_KEYS variable never expire and can eat much memory on long
# running servers. Currently it is not confirmed in Satchmo.
# If more worker processes are used, the reported values of the following three
# variables can jump randomly up and down.
CACHED_KEYS = {}
CACHE_CALLS = 0
CACHE_HITS = 0
KEY_DELIM = "::"
REQUEST_CACHE = {'enabled' : False}
try:
CACHES = getattr(settings, "CACHES")
CACHE_BACKEND = CACHES['default']['BACKEND']
try:
CACHE_PREFIX = CACHES['default']['KEY_PREFIX']
except KeyError:
CACHE_PREFIX = str(settings.SITE_ID)
log.warn("No KEY_PREFIX found in settings.CACHES['default'], using SITE_ID. Please update your settings to add a CACHES")
try:
CACHE_TIMEOUT = CACHES['default']['TIMEOUT']
except KeyError:
CACHE_TIMEOUT = getattr(settings, 'CACHE_TIMEOUT', 0)
log.warn("No TIMEOUT found in settings.CACHES['default'], so we used %s%s. "
"Please update your settings to add a TIMEOUT and avoid this warning.",
CACHE_TIMEOUT,
CACHE_TIMEOUT == 0 and ", disabling the cache system" or "")
except AttributeError:
try:
CACHE_BACKEND = settings.CACHE_BACKEND
except AttributeError:
CACHE_BACKEND = "locmem://"
log.warn("No cache settings are set. Using default locmem. Please update your settings")
try:
CACHE_PREFIX = settings.CACHE_PREFIX
except AttributeError:
CACHE_PREFIX = str(settings.SITE_ID)
log.warn("No CACHE_PREFIX found in settings, using SITE_ID. Please update your settings to add a CACHE_PREFIX")
try:
CACHE_TIMEOUT = settings.CACHE_TIMEOUT
except AttributeError:
CACHE_TIMEOUT = 0
log.warn("No CACHE_TIMEOUT found in settings, so we used 0, disabling the cache system. Please update your settings to add a CACHE_TIMEOUT and avoid this warning.")
_CACHE_ENABLED = CACHE_TIMEOUT > 0
class CacheWrapper(object):
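    """Wrapper for cached values.

    Wrapping lets a cache miss (None) be distinguished from a cached value,
    and ``inprocess=True`` marks a value whose computation is still in
    progress (see ``cache_function``).
    """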
def __init__(self, val, inprocess=False):
self.val = val
self.inprocess = inprocess
def __str__(self):
return str(self.val)
def __repr__(self):
return repr(self.val)
def wrap(cls, obj):
if isinstance(obj, cls):
return obj
else:
return cls(obj)
wrap = classmethod(wrap)
class MethodNotFinishedError(Exception):
def __init__(self, f):
self.func = f
class NotCachedError(Exception):
def __init__(self, k):
self.key = k
class CacheNotRespondingError(Exception):
pass
def cache_delete(*keys, **kwargs):
"""
Deletes the object identified by all ``keys`` from the cache.
keys:
        Parameters of a general type which are unambiguously convertible to
        string or hashable.
    kwargs:
        children:
            If True, all objects whose keys start with these keys are also
            deleted.
        other kwargs:
            An unknown key=val is interpreted as two additional keys: (key, val)
    If no keys are present, all cached objects are to be deleted.
    Deleting multiple or all objects is usually not complete if the
project is running with multiple worker processes.
(It is reliable e.g. with a development server.)
"""
removed = []
if cache_enabled():
global CACHED_KEYS
log.debug('cache_delete')
children = kwargs.pop('children', False)
if (keys or kwargs):
key = cache_key(*keys, **kwargs)
if CACHED_KEYS.has_key(key):
del CACHED_KEYS[key]
removed.append(key)
cache.delete(key)
if children:
key = key + KEY_DELIM
children = [x for x in CACHED_KEYS.keys() if x.startswith(key)]
for k in children:
del CACHED_KEYS[k]
cache.delete(k)
removed.append(k)
else:
key = "All Keys"
deleteneeded = _cache_flush_all()
removed = CACHED_KEYS.keys()
if deleteneeded:
for k in CACHED_KEYS:
cache.delete(k)
CACHED_KEYS = {}
if removed:
log.debug("Cache delete: %s", removed)
else:
log.debug("No cached objects to delete for %s", key)
return removed
def cache_delete_function(func):
return cache_delete(['func', func.__name__, func.__module__], children=True)
def cache_enabled():
global _CACHE_ENABLED
return _CACHE_ENABLED
def cache_enable(state=True):
global _CACHE_ENABLED
_CACHE_ENABLED=state
def _cache_flush_all():
if is_memcached_backend():
cache._cache.flush_all()
return False
return True
def cache_function(length=CACHE_TIMEOUT):
"""
A variant of the snippet posted by Jeff Wheeler at
http://www.djangosnippets.org/snippets/109/
Caches a function, using the function and its arguments as the key, and the return
value as the value saved. It passes all arguments on to the function, as
it should.
The decorator itself takes a length argument, which is the number of
seconds the cache will keep the result around.
It will put a temp value in the cache while the function is
processing. This should not matter in most cases, but if the app is using
threads, you won't be able to get the previous value, and will need to
wait until the function finishes. If this is not desired behavior, you can
remove the first two lines after the ``else``.
"""
def decorator(func):
def inner_func(*args, **kwargs):
if not cache_enabled():
value = func(*args, **kwargs)
else:
try:
value = cache_get('func', func.__name__, func.__module__, args, kwargs)
except NotCachedError, e:
# This will set a temporary value while ``func`` is being
# processed. When using threads, this is vital, as otherwise
# the function can be called several times before it finishes
# and is put into the cache.
funcwrapper = CacheWrapper(".".join([func.__module__, func.__name__]), inprocess=True)
cache_set(e.key, value=funcwrapper, length=length, skiplog=True)
value = func(*args, **kwargs)
cache_set(e.key, value=value, length=length)
except MethodNotFinishedError, e:
value = func(*args, **kwargs)
return value
return inner_func
return decorator
def cache_get(*keys, **kwargs):
"""
Gets the object identified by all ``keys`` from the cache.
kwargs:
default:
Default value used if the object is not in the cache. If the object
is not found and ``default`` is not set or is None, the exception
``NotCachedError`` is raised with the attribute ``.key = keys``.
other kwargs:
            An unknown key=val is interpreted as two additional keys: (key, val)
"""
if kwargs.has_key('default'):
default_value = kwargs.pop('default')
use_default = True
else:
use_default = False
key = cache_key(keys, **kwargs)
if not cache_enabled():
raise NotCachedError(key)
else:
global CACHE_CALLS, CACHE_HITS, REQUEST_CACHE
CACHE_CALLS += 1
if CACHE_CALLS == 1:
cache_require()
obj = None
tid = -1
if REQUEST_CACHE['enabled']:
tid = cache_get_request_uid()
if tid > -1:
try:
obj = REQUEST_CACHE[tid][key]
log.debug('Got from request cache: %s', key)
except KeyError:
pass
        if obj is None:
obj = cache.get(key)
if obj and isinstance(obj, CacheWrapper):
CACHE_HITS += 1
CACHED_KEYS[key] = True
log.debug('got cached [%i/%i]: %s', CACHE_CALLS, CACHE_HITS, key)
if obj.inprocess:
raise MethodNotFinishedError(obj.val)
cache_set_request(key, obj, uid=tid)
return obj.val
else:
try:
del CACHED_KEYS[key]
except KeyError:
pass
if use_default:
return default_value
raise NotCachedError(key)
def cache_set(*keys, **kwargs):
"""Set the object identified by all ``keys`` into the cache.
kwargs:
value:
The object to be cached.
length:
Timeout for the object. Default is CACHE_TIMEOUT.
skiplog:
If it is True the call is never logged. Default is False.
other kwargs:
            An unknown key=val is interpreted as two additional keys: (key, val)
"""
if cache_enabled():
global CACHED_KEYS, REQUEST_CACHE
obj = kwargs.pop('value')
length = kwargs.pop('length', CACHE_TIMEOUT)
skiplog = kwargs.pop('skiplog', False)
key = cache_key(keys, **kwargs)
val = CacheWrapper.wrap(obj)
if not skiplog:
log.debug('setting cache: %s', key)
cache.set(key, val, length)
CACHED_KEYS[key] = True
if REQUEST_CACHE['enabled']:
cache_set_request(key, val)
def _hash_or_string(key):
if is_string_like(key) or isinstance(key, (types.IntType, types.LongType, types.FloatType)):
return smart_str(key)
else:
try:
#if it has a PK, use it.
return str(key._get_pk_val())
except AttributeError:
return md5_hash(key)
def cache_key(*keys, **pairs):
"""Smart key maker, returns the object itself if a key, else a list
delimited by ':', automatically hashing any non-scalar objects."""
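    # e.g. cache_key('product', 42, color='red')
    #      -> '<CACHE_PREFIX>::product::42::color::red'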
if len(keys) == 1 and is_list_or_tuple(keys[0]):
keys = keys[0]
if pairs:
keys = list(keys)
for k in sorted(pairs.keys()):
keys.extend((k, pairs[k]))
key = KEY_DELIM.join([_hash_or_string(x) for x in keys])
prefix = CACHE_PREFIX + KEY_DELIM
if not key.startswith(prefix):
key = prefix+key
return key.replace(" ", ".")
def md5_hash(obj):
pickled = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
return hashlib.md5(pickled).hexdigest()
def is_memcached_backend():
try:
return cache._cache.__module__.endswith('memcache')
except AttributeError:
return False
def cache_require():
"""Error if keyedcache isn't running."""
if cache_enabled():
key = cache_key('require_cache')
cache_set(key,value='1')
v = cache_get(key, default = '0')
if v != '1':
raise CacheNotRespondingError()
else:
log.debug("Cache responding OK")
return True
def cache_clear_request(uid):
"""Clears all locally cached elements with that uid"""
global REQUEST_CACHE
try:
del REQUEST_CACHE[uid]
log.debug('cleared request cache: %s', uid)
except KeyError:
pass
def cache_use_request_caching():
global REQUEST_CACHE
REQUEST_CACHE['enabled'] = True
def cache_get_request_uid():
from threaded_multihost import threadlocals
return threadlocals.get_thread_variable('request_uid', -1)
def cache_set_request(key, val, uid=None):
    if uid is None:
        uid = cache_get_request_uid()
    if uid > -1:
global REQUEST_CACHE
if not uid in REQUEST_CACHE:
REQUEST_CACHE[uid] = {key:val}
else:
REQUEST_CACHE[uid][key] = val
|
mit
| 9,166,052,249,214,150,000 | 30.559278 | 173 | 0.596815 | false | 4.080307 | false | false | false |
diplomacy/research
|
diplomacy_research/scripts/render.py
|
1
|
9835
|
#!/usr/bin/env python3
# ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Renders a same tournament game
Argument: File path to .json in history folder
"""
import argparse
import os
import multiprocessing
import shutil
from diplomacy import Game
import ujson as json
from diplomacy_research.proto.diplomacy_proto.game_pb2 import SavedGame as SavedGameProto
from diplomacy_research.utils.proto import proto_to_dict, read_next_proto
def render_saved_game(saved_game, output_dir, prefix=''):
""" Renders a specific saved game
:param saved_game: The saved game to render
:param output_dir: The output directory where to save the rendering
:param prefix: An optional prefix to add before the game id
"""
if prefix:
output_dir = os.path.join(output_dir, prefix + '_' + saved_game['id'])
else:
output_dir = os.path.join(output_dir, saved_game['id'])
nb_phases = len(saved_game['phases'])
svg_count = 0
# Checking if already generated
# Otherwise, regenerating completely
if os.path.exists(output_dir):
nb_svg = len([os.path.join(output_dir, file) for file in os.listdir(output_dir) if file[-4:] == '.svg'])
if nb_svg == 2 * nb_phases:
print('Rendered {} (Skipped)'.format(saved_game['id']))
return
shutil.rmtree(output_dir, ignore_errors=True)
os.makedirs(output_dir, exist_ok=True)
# Creating a Game to replay all orders, and a new Game object per phase to validate
entire_game = Game()
if saved_game['phases']:
entire_game.set_state(saved_game['phases'][0]['state'])
# Rendering
for phase in saved_game['phases']:
phase_game = Game()
# Setting state
state = phase['state']
phase_game.set_state(state)
entire_game.note = phase_game.note
# Setting orders
phase_game.clear_orders()
orders = phase['orders']
for power_name in orders:
phase_game.set_orders(power_name, orders[power_name])
entire_game.set_orders(power_name, orders[power_name])
# Validating that we are at the same place
for power_name in orders:
assert sorted(phase_game.get_units(power_name)) == sorted(entire_game.get_units(power_name))
assert sorted(phase_game.get_centers(power_name)) == sorted(entire_game.get_centers(power_name))
# Rendering with and without orders
with open(os.path.join(output_dir, '%03d%s' % (svg_count, '.svg')), 'w') as file:
file.write(entire_game.render(incl_orders=False))
svg_count += 1
with open(os.path.join(output_dir, '%03d%s' % (svg_count, '.svg')), 'w') as file:
file.write(entire_game.render(incl_orders=True))
# Processing (for entire game)
svg_count += 1
entire_game.process()
print('Rendered {}'.format(saved_game['id']))
# =========================================
# ------- JSON RENDERING ----------
# =========================================
def render_json(file_path):
""" Renders a specific json file
:param file_path: The full path to the json file
:return: Nothing, but creates a directory (file_path without '.json') containing the rendered images
"""
dir_path = os.path.dirname(file_path)
# Aborting if file doesn't exist
if not os.path.exists(file_path):
print('File {} does not exist.'.format(file_path))
return
# Loading saved game
file_content = open(file_path, 'r').read()
saved_game = json.loads(file_content)
# Rendering
render_saved_game(saved_game, dir_path)
def render_multi_json_per_folder(history_dir, nb_json_per_folder):
""" Finds all subfolders under history and renders 'nb_jsons' games in each subfolder found
:param history_dir: The full path to the history folder
:param nb_json_per_folder: The number of jsons to render per subfolder
:return: Nothing
"""
jsons_to_render = []
# Finding files to render
subfolders = [os.path.join(history_dir, path)
for path in os.listdir(history_dir)
if os.path.isdir(os.path.join(history_dir, path))]
for folder in subfolders:
json_games = sorted([os.path.join(folder, json_filename)
for json_filename in os.listdir(folder)
if json_filename[-5:] == '.json'])
json_games = json_games[:nb_json_per_folder]
for json_path in json_games:
jsons_to_render += [json_path]
# Running over multiple processes
nb_cores = multiprocessing.cpu_count()
with multiprocessing.Pool(nb_cores) as pool:
pool.map(render_json, jsons_to_render)
# =========================================
# ------- PROTO RENDERING ----------
# =========================================
def render_saved_game_proto(saved_game_proto, output_dir, prefix='', json_only=False):
""" Renders a saved game proto
:param saved_game_proto: A `.proto.game.SavedGame` object
:param output_dir: The output directory where the save the renderings
:param prefix: An optional prefix to add before the game id
:param json_only: Indicates we only want to extract the underlying JSON
"""
saved_game = proto_to_dict(saved_game_proto)
if json_only:
os.makedirs(os.path.join(output_dir, 'json'), exist_ok=True)
output_path = os.path.join(output_dir, 'json', prefix + '_' + saved_game['id'] + '.json')
with open(output_path, 'w') as file:
file.write(json.dumps(saved_game))
print('Saved JSON for {}'.format(saved_game['id']))
else:
render_saved_game(saved_game, output_dir, prefix)
def render_proto_file(file_path, args, compressed=True):
""" Renders all saved game proto in a proto file
:param file_path: The path to the proto file
:param args: The parsed command line arguments
:param compressed: Boolean that indicates if compression was used.
"""
dir_path = os.path.dirname(file_path)
game_count = 0
# Aborting if file doesn't exist
if not os.path.exists(file_path):
print('File {} does not exist.'.format(file_path))
return
# Processing filter
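    # Accepted forms, comma-separated: "1-5" (inclusive range),
    # "10:100:2" (range with step) and bare integers,
    # e.g. "1-5,6,8,10:100:2".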
games_to_render = []
if args.filter:
for part in args.filter.split(','):
if '-' in part:
start, stop = part.split('-')
games_to_render += list(range(int(start), int(stop) + 1))
elif ':' in part:
start, stop, step = part.split(':')
games_to_render += list(range(int(start), int(stop) + 1, int(step)))
else:
games_to_render += [int(part)]
# Rendering each game in the proto file
with open(file_path, 'rb') as file:
while True:
saved_game_proto = read_next_proto(SavedGameProto, file, compressed)
if saved_game_proto is None:
break
game_count += 1
if game_count in games_to_render or (not games_to_render and not args.count):
print('(Game #%d) ' % game_count, end='')
render_saved_game_proto(saved_game_proto, dir_path, prefix='%05d' % game_count, json_only=args.json)
if game_count % 100 == 0 and args.count:
print('... %d games found so far.' % game_count)
# Printing the number of games in the proto file
if args.count:
print('Found %d games in the proto file.' % game_count)
# =========================================
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(description='Render some saved games.')
PARSER.add_argument('--count', action='store_true', help='Count the number of games in the file')
PARSER.add_argument('--json', action='store_true', help='Only extract jsons without rendering the games')
PARSER.add_argument('--filter', help='Only render some games e.g. 1-5,6,8,10:100:2')
PARSER.add_argument('--nb_per_folder', type=int, default=0, help='The number of games per folder to generate')
PARSER.add_argument('file_path', help='The file path containing the saved games.')
ARGS = PARSER.parse_args()
# Rendering a single JSON
# Syntax: render.py <json path>
if ARGS.file_path[-5:] == '.json':
render_json(ARGS.file_path)
exit(0)
# Render a series of game in a .pb file
# Syntax: render.py <pb path>
if ARGS.file_path[-3:] == '.pb':
render_proto_file(ARGS.file_path, ARGS, compressed=False)
exit(0)
if ARGS.file_path[-4:] == '.pbz':
render_proto_file(ARGS.file_path, ARGS, compressed=True)
exit(0)
# Rendering a certain number of JSON per folder
# Syntax: render.py <history/> --nb_per_folder <# of json per folder to generate>
if os.path.exists(ARGS.file_path) and ARGS.nb_per_folder:
render_multi_json_per_folder(ARGS.file_path, ARGS.nb_per_folder)
exit(0)
# Invalid syntax
PARSER.print_help()
exit(-1)
|
mit
| 5,406,339,455,071,944,000 | 40.673729 | 116 | 0.604677 | false | 3.773983 | false | false | false |
Shinao/SmartMirror
|
Motion/main.py
|
1
|
1803
|
import cv2
import time, os
from motion import Motion
from tornado import web, ioloop
import threading
import json
import requests
from config import config
import logging
# Send gesture to node server
logging.getLogger("requests").setLevel(logging.WARNING) # get infos on error
take_photo = False
photo_filepath = ""
def SendGesture(gesture):
try:
requests.get("http://localhost:3000/motion/gesture", params=json.dumps(gesture.properties))
except Exception as ex:
print("Could not send gesture: " + str(ex))
# Received command from node server to take a photo
def ManageCommands(motion):
global take_photo
if not take_photo:
return
print("Taking photo: " + photo_filepath)
cv2.imwrite("../public/" + photo_filepath, motion.currentFrame)
take_photo = False
# Main loop - get gestures and send them
def ManageMotion():
motion = Motion()
while motion.IsActive():
ManageCommands(motion)
# Manage motion and gestures
motion.GetInformationOnNextFrame()
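        # Throttle the loop when nothing has moved for a while to save CPU.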
if motion.TimeElapsedSinceLastMotion() > config['timeToWaitWhenNoMovementBeforeSleep']:
time.sleep(config['timeToSleepWhenNoMovement'])
gesture = motion.GetGesture()
threading.Thread(target=SendGesture, args=(gesture,)).start()
motion.Dispose()
os._exit(1)
class CommandHandler(web.RequestHandler):
def get(self):
global take_photo, photo_filepath
filepath = self.get_argument('filepath', 'public/frame.jpg')
take_photo = True
photo_filepath = filepath
if __name__ == '__main__':
threading.Thread(target=ManageMotion).start()
application = web.Application([
(r"/takePhoto", CommandHandler),
])
application.listen(3001)
ioloop.IOLoop.current().start()
|
mit
| 3,999,531,328,340,736,000 | 26.738462 | 99 | 0.687188 | false | 3.869099 | false | false | false |
iceslide/Lili
|
formatter.py
|
1
|
2383
|
# -*- coding: utf-8 -*-
import constants
__author__ = constants.__author__
__copyright__ = constants.__copyright__
__license__ = constants.__license__
def format(block):
""" Apply formatting to a block. """
wideopeningchars = constants.WIDE_OPENING_CHARS
text = block.gettext()
newtext = []
isquote = False
for i in range(len(text)):
line = text[i]
lineno = i + 1
if (lineno == 1 and len(line) > 0 and line[0] in wideopeningchars):
isquote = True
newtext.append(_formatline(line, lineno, isquote))
block.settext(newtext)
# =================================================================
def _formatline(line, lineno, isquote):
""" Apply formatting to a line. """
widewhitespace = constants.WIDE_WHITESPACE
wideopeningchars = constants.WIDE_OPENING_CHARS
wideclosingchars = constants.WIDE_CLOSING_CHARS
newline = constants.NEWLINE
#has_newline = line.endswith(newline)
if(line.strip() == ''):
# Empty line or filled with whitespaces
return line
line = line.rstrip()
#
# Indentation rules
#
# Remove leading normal white spaces
while (line.startswith(' ')):
line = line[1:]
#
if (lineno == 1 and isquote):
while (line[0] not in wideopeningchars):
line = line[1:]
if (lineno == 1 and not isquote):
if (not line.startswith(widewhitespace)):
line = widewhitespace + line
# Insert double width whitespace to align lines/paragraph
if (lineno > 1 and isquote):
if (not line.startswith(widewhitespace)):
line = widewhitespace + line
# If no quotation, the lines/paragraph is not aligned
if (lineno > 1 and not isquote):
if (line.startswith(widewhitespace)):
line = line[1:]
# A quote cannot end in dot '.', except in the case of ellipsis "..."
if (isquote):
for c in wideclosingchars:
i = line.find(c)
while(i != -1):
if(line[i - 1] == '.' and not line.endswith('...')):
line = line[:i - 1] + line[i:]
i = line.find(c, i+1)
#if (has_newline):
# line = line + constants.NEWLINE
return line
|
gpl-3.0
| 1,472,706,962,389,387,500 | 27.380952 | 75 | 0.535879 | false | 4.225177 | false | false | false |
Castronova/EMIT
|
wrappers/odm2.py
|
1
|
2672
|
__author__ = 'tonycastronova'
import wrappers
import stdlib
from wrappers import base
from utilities import geometry
from utilities.status import Status
from api_old.ODM2.Core.services import readCore
from api_old.ODM2.Results.services import readResults
class wrapper(base.BaseWrapper):
def __init__(self, args):
super(wrapper, self).__init__()
self.args = args
session = self.args['session']
resultid = self.args['resultid']
# get result object and result timeseries
core = readCore(session)
obj = core.getResultByID(resultID=int(resultid))
readres = readResults(session)
results = readres.getTimeSeriesValuesByResultId(resultId=int(resultid))
# separate the date and value pairs in the timeseries
dates = [date.ValueDateTime for date in results]
values = [val.DataValue for val in results]
# basic exchange item info
name = obj.VariableObj.VariableCode
desc = obj.VariableObj.VariableDefinition
type = stdlib.ExchangeItemType.OUTPUT
start = min(dates)
end = max(dates)
# build variable
variable = stdlib.Variable()
variable.VariableDefinition(obj.VariableObj.VariableDefinition)
variable.VariableNameCV(obj.VariableObj.VariableNameCV)
# build unit
unit = stdlib.Unit()
unit.UnitAbbreviation(obj.UnitObj.UnitsAbbreviation)
unit.UnitName(obj.UnitObj.UnitsName)
unit.UnitTypeCV(obj.UnitObj.UnitsTypeCV)
# build geometries
# todo: need to specify srs and elevation
wkb = str(obj.FeatureActionObj.SamplingFeatureObj.FeatureGeometry.data)
geom = geometry.fromWKB(wkb)
# build exchange item object
oei = stdlib.ExchangeItem( name=name,
desc=desc,
geometry=geom,
unit=unit,
variable=variable,type=type )
# set global parameters
self.name(name)
self.simulation_start(start)
self.simulation_end(end)
self.outputs(name=name, value=oei)
self.description(obj.VariableObj.VariableDefinition)
self.current_time(start)
# self.__obj = obj
# self.__resultid = obj.ResultID
# self.__session = session
# set model status
self.status(Status.Loaded)
def type(self):
return wrappers.Types().ODM2
def finish(self):
return
def prepare(self):
self.status(Status.Ready)
def run(self, inputs):
self.status(Status.Finished)
|
gpl-2.0
| 3,811,784,181,105,661,000 | 29.022472 | 79 | 0.623503 | false | 4.227848 | false | false | false |
garthg/petitions-dataverse
|
merge_doi_maps.py
|
1
|
2439
|
'''merge_doi_maps.py
Copyright 2018 Garth Griffin
Distributed under the GNU GPL v3. For full terms see the file LICENSE.
This file is part of PetitionsDataverse.
PetitionsDataverse is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
PetitionsDataverse is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
PetitionsDataverse. If not, see <http://www.gnu.org/licenses/>.
________________________________________________________________________________
Author: Garth Griffin (http://garthgriffin.com)
Date: February 23 2018
'''
import sys
import collections
import tsvfile
merge_into_tsv = sys.argv[1]
merge_new_tsvs = sys.argv[2:]
def merge(merge_into_tsv, merge_new_tsv):
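  # Merges DOI <-> Local ID rows from merge_new_tsv into merge_into_tsv:
  # skips rows already present, raises ValueError on conflicting mappings,
  # and writes the merged rows back to merge_into_tsv.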
print 'Merge %s <-- %s' % (merge_into_tsv, merge_new_tsv)
rows = tsvfile.ReadDicts(merge_into_tsv)
update_rows = tsvfile.ReadDicts(merge_new_tsv)
prev_map_id = dict([(x['Local ID'], x['DOI']) for x in rows])
prev_map_doi = dict([(x['DOI'], x['Local ID']) for x in rows])
if len(prev_map_id) != len(rows):
raise ValueError('Non-unique local IDs in %s' % merge_into_tsv)
if len(prev_map_doi) != len(rows):
raise ValueError('Non-unique DOIs in %s' % merge_into_tsv)
counters = collections.defaultdict(int)
for row in update_rows:
counters['total'] += 1
local_id = row['Local ID']
doi = row['DOI']
needs_update = True
if local_id in prev_map_id:
if prev_map_id[local_id] != doi:
raise ValueError('Conflicted local ID in %s: %s' % (
merge_new_tsv, local_id))
needs_update = False
if doi in prev_map_doi:
if prev_map_doi[doi] != local_id:
raise ValueError('Conflicted DOI in %s: %s' % (merge_new_tsv, doi))
needs_update = False
if needs_update:
counters['update'] += 1
prev_map_id[local_id] = doi
prev_map_doi[doi] = local_id
rows.append(row)
else:
counters['preexisting'] += 1
print str(dict(counters))
tsvfile.WriteDicts(merge_into_tsv, rows)
for f in merge_new_tsvs:
merge(merge_into_tsv, f)
|
gpl-3.0
| 7,386,181,290,347,419,000 | 32.410959 | 80 | 0.664207 | false | 3.313859 | false | false | false |
k4ml/Marimorepy
|
mamopublic/common.py
|
1
|
2741
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009, MARIMORE Inc Tokyo, Japan.
# Contributed by
# Iqbal Abdullah <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the MARIMORE Inc nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This module defines commonly used code for the mamo package
"""
__author__ = "Iqbal Abdullah <[email protected]>"
__date__ = "$LastChangedDate$"
__version__ = "$LastChangedRevision$"
import re
class BaseClass(object):
"""
BaseClass contains very common functions
    and implementations for mamo classes. All classes in the mamo
    packages use BaseClass as their parent class.
"""
def read_only_property(self):
        raise AttributeError("Read-only attribute")
def _prop_set_classname(self, value):
self.read_only_property()
def _prop_get_classname(self):
compiled_re = re.compile("'.*'")
clsname = compiled_re.search("%s" % (self.__class__)).group()
clsname = clsname.replace("'","")
clsname = clsname.replace("%s" % (self.__module__), "")
clsname = clsname.replace(".","")
return clsname
myclassname = property(_prop_get_classname, _prop_set_classname,
doc="Returns the name of the class")
|
bsd-3-clause
| 641,584,754,417,837,600 | 41.828125 | 85 | 0.69938 | false | 4.09716 | false | false | false |
sympsi/sympsi
|
sympsi/commutator.py
|
1
|
7156
|
"""The commutator: [A,B] = A*B - B*A."""
from __future__ import print_function, division
from sympy import S, Expr, Mul, Add
from sympy.core.compatibility import u
from sympy.integrals.integrals import Integral
from sympy.printing.pretty.stringpict import prettyForm
from sympsi.dagger import Dagger
from sympsi.operator import Operator
__all__ = [
'Commutator'
]
#-----------------------------------------------------------------------------
# Commutator
#-----------------------------------------------------------------------------
class Commutator(Expr):
"""The standard commutator, in an unevaluated state.
Evaluating a commutator is defined [1]_ as: ``[A, B] = A*B - B*A``. This
class returns the commutator in an unevaluated form. To evaluate the
commutator, use the ``.doit()`` method.
    Canonical ordering of a commutator is ``[A, B]`` for ``A < B``. The
arguments of the commutator are put into canonical order using ``__cmp__``.
If ``B < A``, then ``[B, A]`` is returned as ``-[A, B]``.
Parameters
==========
A : Expr
The first argument of the commutator [A,B].
B : Expr
The second argument of the commutator [A,B].
Examples
========
>>> from sympsi import Commutator, Dagger, Operator
>>> from sympy.abc import x, y
>>> A = Operator('A')
>>> B = Operator('B')
>>> C = Operator('C')
Create a commutator and use ``.doit()`` to evaluate it:
>>> comm = Commutator(A, B)
>>> comm
[A,B]
>>> comm.doit()
A*B - B*A
    The commutator orders its arguments in canonical order:
>>> comm = Commutator(B, A); comm
-[A,B]
Commutative constants are factored out:
>>> Commutator(3*x*A, x*y*B)
3*x**2*y*[A,B]
Using ``.expand(commutator=True)``, the standard commutator expansion rules
can be applied:
>>> Commutator(A+B, C).expand(commutator=True)
[A,C] + [B,C]
>>> Commutator(A, B+C).expand(commutator=True)
[A,B] + [A,C]
>>> Commutator(A*B, C).expand(commutator=True)
[A,C]*B + A*[B,C]
>>> Commutator(A, B*C).expand(commutator=True)
[A,B]*C + B*[A,C]
Adjoint operations applied to the commutator are properly applied to the
arguments:
>>> Dagger(Commutator(A, B))
-[Dagger(A),Dagger(B)]
References
==========
.. [1] http://en.wikipedia.org/wiki/Commutator
"""
is_commutative = False
def __new__(cls, A, B):
r = cls.eval(A, B)
if r is not None:
return r
obj = Expr.__new__(cls, A, B)
return obj
@classmethod
def eval(cls, a, b):
if not (a and b):
return S.Zero
if a == b:
return S.Zero
if a.is_commutative or b.is_commutative:
return S.Zero
# [xA,yB] -> xy*[A,B]
# from sympy.physics.qmul import QMul
ca, nca = a.args_cnc()
cb, ncb = b.args_cnc()
c_part = ca + cb
if c_part:
return Mul(Mul(*c_part), cls(Mul._from_args(nca), Mul._from_args(ncb)))
# Canonical ordering of arguments
# The Commutator [A, B] is in canonical form if A < B.
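        # Expr.compare returns 1 when a sorts after b, so swap the
        # arguments and pull the sign out front.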
if a.compare(b) == 1:
return S.NegativeOne*cls(b, a)
def _eval_expand_commutator(self, **hints):
A = self.args[0]
B = self.args[1]
if isinstance(A, Add):
# [A + B, C] -> [A, C] + [B, C]
sargs = []
for term in A.args:
comm = Commutator(term, B)
if isinstance(comm, Commutator):
comm = comm._eval_expand_commutator()
sargs.append(comm)
return Add(*sargs)
elif isinstance(B, Add):
# [A, B + C] -> [A, B] + [A, C]
sargs = []
for term in B.args:
comm = Commutator(A, term)
if isinstance(comm, Commutator):
comm = comm._eval_expand_commutator()
sargs.append(comm)
return Add(*sargs)
elif isinstance(A, Mul):
# [A*B, C] -> A*[B, C] + [A, C]*B
a = A.args[0]
b = Mul(*A.args[1:])
c = B
comm1 = Commutator(b, c)
comm2 = Commutator(a, c)
if isinstance(comm1, Commutator):
comm1 = comm1._eval_expand_commutator()
if isinstance(comm2, Commutator):
comm2 = comm2._eval_expand_commutator()
first = Mul(a, comm1)
second = Mul(comm2, b)
return Add(first, second)
elif isinstance(B, Mul):
# [A, B*C] -> [A, B]*C + B*[A, C]
a = A
b = B.args[0]
c = Mul(*B.args[1:])
comm1 = Commutator(a, b)
comm2 = Commutator(a, c)
if isinstance(comm1, Commutator):
comm1 = comm1._eval_expand_commutator()
if isinstance(comm2, Commutator):
comm2 = comm2._eval_expand_commutator()
first = Mul(comm1, c)
second = Mul(b, comm2)
return Add(first, second)
elif isinstance(A, Integral):
# [∫adx, B] -> ∫[a, B]dx
func, lims = A.function, A.limits
new_args = [Commutator(func, B)]
for lim in lims:
new_args.append(lim)
return Integral(*new_args)
elif isinstance(B, Integral):
# [A, ∫bdx] -> ∫[A, b]dx
func, lims = B.function, B.limits
new_args = [Commutator(A, func)]
for lim in lims:
new_args.append(lim)
return Integral(*new_args)
# No changes, so return self
return self
def doit(self, **hints):
""" Evaluate commutator """
A = self.args[0]
B = self.args[1]
if isinstance(A, Operator) and isinstance(B, Operator):
try:
comm = A._eval_commutator(B, **hints)
except NotImplementedError:
try:
comm = -1*B._eval_commutator(A, **hints)
except NotImplementedError:
comm = None
if comm is not None:
return comm.doit(**hints)
return (A*B - B*A).doit(**hints)
def _eval_adjoint(self):
return Commutator(Dagger(self.args[1]), Dagger(self.args[0]))
def _sympyrepr(self, printer, *args):
return "%s(%s,%s)" % (
self.__class__.__name__, printer._print(
self.args[0]), printer._print(self.args[1])
)
def _sympystr(self, printer, *args):
return "[%s,%s]" % (self.args[0], self.args[1])
def _pretty(self, printer, *args):
pform = printer._print(self.args[0], *args)
pform = prettyForm(*pform.right((prettyForm(u(',')))))
pform = prettyForm(*pform.right((printer._print(self.args[1], *args))))
pform = prettyForm(*pform.parens(left='[', right=']'))
return pform
def _latex(self, printer, *args):
return "\\left[%s,%s\\right]" % tuple([
printer._print(arg, *args) for arg in self.args])
|
bsd-3-clause
| -1,256,890,188,336,245,500 | 30.488987 | 83 | 0.504896 | false | 3.441502 | false | false | false |
sveetch/boussole
|
tests/100_compiler/021_source_map.py
|
1
|
5369
|
# -*- coding: utf-8 -*-
import os
import io
import json
from boussole.conf.model import Settings
def test_source_map_path_001(compiler, temp_builds_dir):
"""
    Check the source map path from 'sourceMappingURL' with a simple path
"""
basic_settings = Settings(initial={
"SOURCES_PATH": ".",
"TARGET_PATH": "css",
"SOURCE_MAP": True,
"OUTPUT_STYLES": "compact",
})
basedir = temp_builds_dir.join("compiler_source_map_path_001").strpath
sourcedir = os.path.normpath(
os.path.join(basedir, basic_settings.SOURCES_PATH)
)
targetdir = os.path.normpath(
os.path.join(basedir, basic_settings.TARGET_PATH)
)
os.makedirs(sourcedir)
os.makedirs(targetdir)
src = os.path.join(sourcedir, "app.scss")
dst = os.path.join(targetdir, "app.css")
src_map = os.path.join(targetdir, "app.map")
# Create sample source to compile
with io.open(src, "w", encoding="utf-8") as f:
f.write("""#content{ color:#ff0000; font-weight:bold; }""")
# Compile
success, message = compiler.safe_compile(basic_settings, src, dst)
assert os.path.exists(dst)
assert os.path.exists(src_map)
with io.open(dst, "r", encoding="utf-8") as f:
content = f.read()
with io.open(src_map, "r", encoding="utf-8") as f:
sourcemap = json.load(f)
# Assert compiled file is ok
assert content == (
"""#content { color: #ff0000; font-weight: bold; }\n\n"""
"""/*# sourceMappingURL=app.map */"""
)
# Drop keys we don't care for this test
del sourcemap["version"]
del sourcemap["mappings"]
del sourcemap["names"]
# Assert source map is ok
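    # "sources" is relative to the map file in "css/", hence "../app.scss".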
assert sourcemap == {
"file": "app.css",
"sources": [
"../app.scss"
],
}
def test_source_map_path_002(compiler, temp_builds_dir):
"""
Check about source map path from "sourceMappingURL" with CSS dir below
Sass source dir
"""
basic_settings = Settings(initial={
"SOURCES_PATH": "scss",
"TARGET_PATH": "project/css",
"SOURCE_MAP": True,
"OUTPUT_STYLES": "compact",
})
basedir = temp_builds_dir.join("compiler_source_map_path_002").strpath
sourcedir = os.path.normpath(
os.path.join(basedir, basic_settings.SOURCES_PATH)
)
targetdir = os.path.normpath(
os.path.join(basedir, basic_settings.TARGET_PATH)
)
os.makedirs(sourcedir)
os.makedirs(targetdir)
src = os.path.join(sourcedir, "app.scss")
dst = os.path.join(targetdir, "app.css")
src_map = os.path.join(targetdir, "app.map")
# Create sample source to compile
with io.open(src, "w", encoding="utf-8") as f:
f.write("""#content{ color:#ff0000; font-weight:bold; }""")
# Compile
success, message = compiler.safe_compile(basic_settings, src, dst)
assert os.path.exists(dst)
assert os.path.exists(src_map)
with io.open(dst, "r", encoding="utf-8") as f:
content = f.read()
with io.open(src_map, "r", encoding="utf-8") as f:
sourcemap = json.load(f)
# Assert compiled file is ok
assert content == (
"""#content { color: #ff0000; font-weight: bold; }\n\n"""
"""/*# sourceMappingURL=app.map */"""
)
# Drop keys we don't care for this test
del sourcemap["version"]
del sourcemap["mappings"]
del sourcemap["names"]
# Assert source map is ok
assert sourcemap == {
"file": "app.css",
"sources": [
"../../scss/app.scss"
],
}
def test_source_map_content(compiler, temp_builds_dir):
"""
    Check the source map content
"""
basic_settings = Settings(initial={
"SOURCES_PATH": ".",
"TARGET_PATH": "css",
"SOURCE_MAP": True,
"OUTPUT_STYLES": "compact",
})
basedir = temp_builds_dir.join("compiler_source_map_content").strpath
sourcedir = os.path.normpath(os.path.join(basedir, basic_settings.SOURCES_PATH))
targetdir = os.path.normpath(os.path.join(basedir, basic_settings.TARGET_PATH))
os.makedirs(sourcedir)
os.makedirs(targetdir)
src = os.path.join(sourcedir, "app.scss")
dst = os.path.join(targetdir, "app.css")
src_map = os.path.join(targetdir, "app.map")
# Create sample source to compile
with io.open(src, "w", encoding="utf-8") as f:
f.write("""#content{ color:#ff0000; font-weight:bold; }""")
# Compile
success, message = compiler.safe_compile(basic_settings, src, dst)
assert os.path.exists(dst)
assert os.path.exists(src_map)
with io.open(dst, "r", encoding="utf-8") as f:
content = f.read()
with io.open(src_map, "r", encoding="utf-8") as f:
sourcemap = json.load(f)
# Assert compiled file is ok
assert content == (
"""#content { color: #ff0000; font-weight: bold; }\n\n"""
"""/*# sourceMappingURL=app.map */"""
)
    # Drop the 'version' key since it would cause problems with future libsass
    # versions
del sourcemap["version"]
# Assert source map is ok
assert sourcemap == {
"file": "app.css",
"sources": [
"../app.scss"
],
"mappings": ("AAAA,AAAA,QAAQ,CAAA,EAAE,KAAK,EAAC,OAAO,EAAE,WAAW,EAAC,"
"IAAI,GAAI"),
"names": []
}
|
mit
| -2,243,959,979,003,704,000 | 26.963542 | 84 | 0.591917 | false | 3.353529 | false | false | false |
karies/root
|
tutorials/roofit/rf106_plotdecoration.py
|
6
|
2263
|
## \file
## \ingroup tutorial_roofit
## \notebook
## Basic functionality: adding boxes with parameters to RooPlots and decorating with arrows, etc...
##
## \macro_code
##
## \author Clemens Lange, Wouter Verkerke (C++ version)
import ROOT
# Set up model
# ---------------------
# Create observables
x = ROOT.RooRealVar("x", "x", -10, 10)
# Create Gaussian
sigma = ROOT.RooRealVar("sigma", "sigma", 1, 0.1, 10)
mean = ROOT.RooRealVar("mean", "mean", -3, -10, 10)
gauss = ROOT.RooGaussian("gauss", "gauss", x, mean, sigma)
# Generate a sample of 1000 events with sigma=3
data = gauss.generate(ROOT.RooArgSet(x), 1000)
# Fit pdf to data
gauss.fitTo(data)
# Plot p.d.f. and data
# -------------------------------------
# Overlay projection of gauss on data
frame = x.frame(ROOT.RooFit.Name("xframe"), ROOT.RooFit.Title(
"RooPlot with decorations"), ROOT.RooFit.Bins(40))
data.plotOn(frame)
gauss.plotOn(frame)
# Add box with pdf parameters
# -----------------------------------------------------
# Left edge of box starts at 55% of Xaxis
gauss.paramOn(frame, ROOT.RooFit.Layout(0.55))
# Add box with data statistics
# -------------------------------------------------------
# X size of box is from 55% to 99% of Xaxis range, top of box is at 80% of
# Yaxis range
data.statOn(frame, ROOT.RooFit.Layout(0.55, 0.99, 0.8))
# Add text and arrow
# -----------------------------------
# Add text to frame
txt = ROOT.TText(2, 100, "Signal")
txt.SetTextSize(0.04)
txt.SetTextColor(ROOT.kRed)
frame.addObject(txt)
# Add arrow to frame
arrow = ROOT.TArrow(2, 100, -1, 50, 0.01, "|>")
arrow.SetLineColor(ROOT.kRed)
arrow.SetFillColor(ROOT.kRed)
arrow.SetLineWidth(3)
frame.addObject(arrow)
# Persist frame with all decorations in ROOT file
# ---------------------------------------------------------------------------------------------
f = ROOT.TFile("rf106_plotdecoration.root", "RECREATE")
frame.Write()
f.Close()
# To read back and plot the frame with all decorations in a clean ROOT session do
# root> f = ROOT.TFile("rf106_plotdecoration.root")
# root> xframe.Draw()
c = ROOT.TCanvas("rf106_plotdecoration", "rf106_plotdecoration", 600, 600)
ROOT.gPad.SetLeftMargin(0.15)
frame.GetYaxis().SetTitleOffset(1.6)
frame.Draw()
c.SaveAs("rf106_plotdecoration.png")
|
lgpl-2.1
| -1,037,350,025,528,606,300 | 26.26506 | 99 | 0.615996 | false | 3.013316 | false | true | false |
chenchiyuan/yajiong
|
applications/posts/management/commands/parse_weixin.py
|
1
|
1199
|
# -*- coding: utf-8 -*-
# __author__ = chenchiyuan
from __future__ import division, unicode_literals, print_function
from django.core.management import BaseCommand
import requests
from bs4 import BeautifulSoup
from applications.posts.models import Post
import time
headers = {
"referer": "http://weixin.sogou.com/",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
}
def smart_print(text):
print(text.encode("utf-8"))
class Command(BaseCommand):
def handle(self, *args, **options):
posts = list(Post.objects.all())
for post in posts:
if post.url and not post.content:
try:
self.parse(post)
                except Exception as err:
smart_print(err.message)
continue
time.sleep(0.2)
def parse(self, post):
smart_print(post.title)
content = requests.get(post.url, headers=headers).content
soup = BeautifulSoup(content)
page_content_tag = soup.find(id="page-content")
        post.content = page_content_tag.extract()  # call extract(); a bare attribute access would store the bound method
post.save()
|
bsd-3-clause
| 7,479,947,682,873,907,000 | 28.268293 | 141 | 0.614679 | false | 3.712074 | false | false | false |
laurentb/weboob
|
modules/ipapi/module.py
|
1
|
2091
|
# -*- coding: utf-8 -*-
# Copyright(C) 2015 Julien Veyssier
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.geolocip import CapGeolocIp, IpLocation
from weboob.tools.backend import Module
from weboob.browser.browsers import Browser
from weboob.tools.json import json
__all__ = ['IpapiModule']
class IpapiModule(Module, CapGeolocIp):
NAME = 'ipapi'
MAINTAINER = u'Julien Veyssier'
EMAIL = '[email protected]'
VERSION = '2.1'
LICENSE = 'AGPLv3+'
DESCRIPTION = u"IP-API Geolocation API"
BROWSER = Browser
def get_location(self, ipaddr):
res = self.browser.location(u'http://ip-api.com/json/%s' % ipaddr)
jres = json.loads(res.text)
if "status" in jres and jres["status"] == "fail":
raise Exception("IPAPI failure : %s" % jres["message"])
iploc = IpLocation(ipaddr)
iploc.city = u'%s'%jres['city']
iploc.region = u'%s'%jres['regionName']
iploc.zipcode = u'%s'%jres['zip']
iploc.country = u'%s'%jres['country']
if jres['lat'] != '':
iploc.lt = float(jres['lat'])
else:
iploc.lt = 0.0
if jres['lon'] != '':
iploc.lg = float(jres['lon'])
else:
iploc.lg = 0.0
#iploc.host = 'NA'
#iploc.tld = 'NA'
if 'isp' in jres:
iploc.isp = u'%s'%jres['isp']
return iploc
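# Minimal usage sketch (illustrative only; assumes a weboob core instance
# named 'weboob' is available, which is how modules are normally built):
#
#   module = IpapiModule(weboob, 'ipapi')
#   location = module.get_location('8.8.8.8')
#   print('%s, %s (%s, %s)' % (location.city, location.country, location.lt, location.lg))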
|
lgpl-3.0
| 1,998,065,906,981,396,000 | 31.671875 | 77 | 0.636538 | false | 3.411093 | false | false | false |
marmyshev/transitions
|
openlp/plugins/bibles/lib/biblestab.py
|
1
|
26262
|
# -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2013 Raoul Snyman #
# Portions copyright (c) 2008-2013 Tim Bentley, Gerald Britton, Jonathan #
# Corwin, Samuel Findlay, Michael Gorven, Scott Guerrieri, Matthias Hub, #
# Meinert Jordan, Armin Köhler, Erik Lundin, Edwin Lunando, Brian T. Meyer. #
# Joshua Miller, Stevan Pettit, Andreas Preikschat, Mattias Põldaru, #
# Christian Richter, Philip Ridout, Simon Scudder, Jeffrey Smith, #
# Maikel Stuivenberg, Martin Thompson, Jon Tibble, Dave Warnock, #
# Frode Woldsund, Martin Zibricky, Patrick Zimmermann #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
import logging
from PyQt4 import QtCore, QtGui
from openlp.core.lib import Receiver, SettingsTab, Settings, UiStrings, translate
from openlp.core.lib.ui import find_and_set_in_combo_box
from openlp.plugins.bibles.lib import LayoutStyle, DisplayStyle, update_reference_separators, \
get_reference_separator, LanguageSelection
log = logging.getLogger(__name__)
class BiblesTab(SettingsTab):
"""
BiblesTab is the Bibles settings tab in the settings dialog.
"""
log.info(u'Bible Tab loaded')
def __init__(self, parent, title, visible_title, icon_path):
self.paragraph_style = True
self.show_new_chapters = False
self.display_style = 0
SettingsTab.__init__(self, parent, title, visible_title, icon_path)
def setupUi(self):
self.setObjectName(u'BiblesTab')
SettingsTab.setupUi(self)
self.verseDisplayGroupBox = QtGui.QGroupBox(self.leftColumn)
self.verseDisplayGroupBox.setObjectName(u'verseDisplayGroupBox')
self.verseDisplayLayout = QtGui.QFormLayout(self.verseDisplayGroupBox)
self.verseDisplayLayout.setObjectName(u'verseDisplayLayout')
self.newChaptersCheckBox = QtGui.QCheckBox(self.verseDisplayGroupBox)
self.newChaptersCheckBox.setObjectName(u'newChaptersCheckBox')
self.verseDisplayLayout.addRow(self.newChaptersCheckBox)
self.displayStyleLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.displayStyleLabel.setObjectName(u'displayStyleLabel')
self.displayStyleComboBox = QtGui.QComboBox(self.verseDisplayGroupBox)
self.displayStyleComboBox.addItems([u'', u'', u'', u''])
self.displayStyleComboBox.setObjectName(u'displayStyleComboBox')
self.verseDisplayLayout.addRow(self.displayStyleLabel, self.displayStyleComboBox)
self.layoutStyleLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.layoutStyleLabel.setObjectName(u'layoutStyleLabel')
self.layoutStyleComboBox = QtGui.QComboBox(self.verseDisplayGroupBox)
self.layoutStyleComboBox.setObjectName(u'layoutStyleComboBox')
self.layoutStyleComboBox.addItems([u'', u'', u''])
self.verseDisplayLayout.addRow(self.layoutStyleLabel, self.layoutStyleComboBox)
self.bibleSecondCheckBox = QtGui.QCheckBox(self.verseDisplayGroupBox)
self.bibleSecondCheckBox.setObjectName(u'bibleSecondCheckBox')
self.verseDisplayLayout.addRow(self.bibleSecondCheckBox)
self.bibleThemeLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.bibleThemeLabel.setObjectName(u'BibleThemeLabel')
self.bibleThemeComboBox = QtGui.QComboBox(self.verseDisplayGroupBox)
self.bibleThemeComboBox.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToMinimumContentsLength)
self.bibleThemeComboBox.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
self.bibleThemeComboBox.addItem(u'')
self.bibleThemeComboBox.setObjectName(u'BibleThemeComboBox')
self.verseDisplayLayout.addRow(self.bibleThemeLabel, self.bibleThemeComboBox)
self.changeNoteLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.changeNoteLabel.setWordWrap(True)
self.changeNoteLabel.setObjectName(u'changeNoteLabel')
self.verseDisplayLayout.addRow(self.changeNoteLabel)
self.leftLayout.addWidget(self.verseDisplayGroupBox)
self.scriptureReferenceGroupBox = QtGui.QGroupBox(self.leftColumn)
self.scriptureReferenceGroupBox.setObjectName(u'scriptureReferenceGroupBox')
self.scriptureReferenceLayout = QtGui.QGridLayout(self.scriptureReferenceGroupBox)
self.verseSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.verseSeparatorCheckBox.setObjectName(u'verseSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.verseSeparatorCheckBox, 0, 0)
self.verseSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
# self.verseSeparatorLineEdit.setPalette
self.verseSeparatorLineEdit.setObjectName(u'verseSeparatorLineEdit')
self.scriptureReferenceLayout.addWidget(self.verseSeparatorLineEdit, 0, 1)
self.rangeSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.rangeSeparatorCheckBox.setObjectName(u'rangeSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.rangeSeparatorCheckBox, 1, 0)
self.rangeSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
self.rangeSeparatorLineEdit.setObjectName(u'rangeSeparatorLineEdit')
self.scriptureReferenceLayout.addWidget(self.rangeSeparatorLineEdit, 1, 1)
self.listSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.listSeparatorCheckBox.setObjectName(u'listSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.listSeparatorCheckBox, 2, 0)
self.listSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
self.listSeparatorLineEdit.setObjectName(u'listSeparatorLineEdit')
self.scriptureReferenceLayout.addWidget(self.listSeparatorLineEdit, 2, 1)
self.endSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.endSeparatorCheckBox.setObjectName(u'endSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.endSeparatorCheckBox, 3, 0)
self.endSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
self.endSeparatorLineEdit.setObjectName(u'endSeparatorLineEdit')
self.endSeparatorLineEdit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(r'[^0-9]*'),
self.endSeparatorLineEdit))
self.scriptureReferenceLayout.addWidget(self.endSeparatorLineEdit, 3, 1)
self.leftLayout.addWidget(self.scriptureReferenceGroupBox)
self.rightColumn.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
self.languageSelectionGroupBox = QtGui.QGroupBox(self.rightColumn)
self.languageSelectionGroupBox.setObjectName(u'languageSelectionGroupBox')
self.languageSelectionLayout = QtGui.QVBoxLayout(self.languageSelectionGroupBox)
self.languageSelectionLabel = QtGui.QLabel(self.languageSelectionGroupBox)
self.languageSelectionLabel.setObjectName(u'languageSelectionLabel')
self.languageSelectionComboBox = QtGui.QComboBox(self.languageSelectionGroupBox)
self.languageSelectionComboBox.setObjectName(u'languageSelectionComboBox')
self.languageSelectionComboBox.addItems([u'', u'', u''])
self.languageSelectionLayout.addWidget(self.languageSelectionLabel)
self.languageSelectionLayout.addWidget(self.languageSelectionComboBox)
self.rightLayout.addWidget(self.languageSelectionGroupBox)
self.leftLayout.addStretch()
self.rightLayout.addStretch()
# Signals and slots
QtCore.QObject.connect(self.newChaptersCheckBox, QtCore.SIGNAL(u'stateChanged(int)'),
self.onNewChaptersCheckBoxChanged)
QtCore.QObject.connect(self.displayStyleComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onDisplayStyleComboBoxChanged)
QtCore.QObject.connect(self.bibleThemeComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onBibleThemeComboBoxChanged)
QtCore.QObject.connect(self.layoutStyleComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onLayoutStyleComboBoxChanged)
QtCore.QObject.connect(self.bibleSecondCheckBox, QtCore.SIGNAL(u'stateChanged(int)'),
self.onBibleSecondCheckBox)
QtCore.QObject.connect(self.verseSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onVerseSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.verseSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onVerseSeparatorLineEditEdited)
QtCore.QObject.connect(self.verseSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onVerseSeparatorLineEditFinished)
QtCore.QObject.connect(self.rangeSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onRangeSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.rangeSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onRangeSeparatorLineEditEdited)
QtCore.QObject.connect(self.rangeSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onRangeSeparatorLineEditFinished)
QtCore.QObject.connect(self.listSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onListSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.listSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onListSeparatorLineEditEdited)
QtCore.QObject.connect(self.listSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onListSeparatorLineEditFinished)
QtCore.QObject.connect(self.endSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onEndSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.endSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onEndSeparatorLineEditEdited)
QtCore.QObject.connect(self.endSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onEndSeparatorLineEditFinished)
QtCore.QObject.connect(Receiver.get_receiver(), QtCore.SIGNAL(u'theme_update_list'), self.updateThemeList)
QtCore.QObject.connect(self.languageSelectionComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onLanguageSelectionComboBoxChanged)
def retranslateUi(self):
self.verseDisplayGroupBox.setTitle(translate('BiblesPlugin.BiblesTab', 'Verse Display'))
self.newChaptersCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Only show new chapter numbers'))
self.layoutStyleLabel.setText(UiStrings().LayoutStyle)
self.displayStyleLabel.setText(UiStrings().DisplayStyle)
self.bibleThemeLabel.setText(translate('BiblesPlugin.BiblesTab', 'Bible theme:'))
self.layoutStyleComboBox.setItemText(LayoutStyle.VersePerSlide, UiStrings().VersePerSlide)
self.layoutStyleComboBox.setItemText(LayoutStyle.VersePerLine, UiStrings().VersePerLine)
self.layoutStyleComboBox.setItemText(LayoutStyle.Continuous, UiStrings().Continuous)
self.displayStyleComboBox.setItemText(DisplayStyle.NoBrackets,
translate('BiblesPlugin.BiblesTab', 'No Brackets'))
self.displayStyleComboBox.setItemText(DisplayStyle.Round,
translate('BiblesPlugin.BiblesTab', '( And )'))
self.displayStyleComboBox.setItemText(DisplayStyle.Curly,
translate('BiblesPlugin.BiblesTab', '{ And }'))
self.displayStyleComboBox.setItemText(DisplayStyle.Square,
translate('BiblesPlugin.BiblesTab', '[ And ]'))
self.changeNoteLabel.setText(translate('BiblesPlugin.BiblesTab',
'Note:\nChanges do not affect verses already in the service.'))
self.bibleSecondCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Display second Bible verses'))
self.scriptureReferenceGroupBox.setTitle(translate('BiblesPlugin.BiblesTab', 'Custom Scripture References'))
self.verseSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Verse Separator:'))
self.rangeSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Range Separator:'))
self.listSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'List Separator:'))
self.endSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'End Mark:'))
#@todo these are common so move to StringsUI and reuse.
self.verseSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'verse separators may be defined.\nThey have to be separated '
'by a vertical bar "|".\nPlease clear this edit line to use '
'the default value.'))
self.rangeSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'range separators may be defined.\nThey have to be separated '
'by a vertical bar "|".\nPlease clear this edit line to use '
'the default value.'))
self.listSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'list separators may be defined.\nThey have to be separated '
'by a vertical bar "|".\nPlease clear this edit line to use '
'the default value.'))
self.endSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'end marks may be defined.\nThey have to be separated by a '
'vertical bar "|".\nPlease clear this edit line to use the '
'default value.'))
self.languageSelectionGroupBox.setTitle(translate('BiblesPlugin.BiblesTab', 'Default Bible Language'))
self.languageSelectionLabel.setText(translate('BiblesPlugin.BiblesTab',
'Book name language in search field,\nsearch results and on display:'))
self.languageSelectionComboBox.setItemText(LanguageSelection.Bible,
translate('BiblesPlugin.BiblesTab', 'Bible Language'))
self.languageSelectionComboBox.setItemText(LanguageSelection.Application,
translate('BiblesPlugin.BiblesTab', 'Application Language'))
self.languageSelectionComboBox.setItemText(LanguageSelection.English,
translate('BiblesPlugin.BiblesTab', 'English'))
def onBibleThemeComboBoxChanged(self):
self.bible_theme = self.bibleThemeComboBox.currentText()
def onDisplayStyleComboBoxChanged(self):
self.display_style = self.displayStyleComboBox.currentIndex()
def onLayoutStyleComboBoxChanged(self):
self.layout_style = self.layoutStyleComboBox.currentIndex()
def onLanguageSelectionComboBoxChanged(self):
self.language_selection = self.languageSelectionComboBox.currentIndex()
def onNewChaptersCheckBoxChanged(self, check_state):
self.show_new_chapters = False
        # We have a set value; convert it to True/False.
if check_state == QtCore.Qt.Checked:
self.show_new_chapters = True
def onBibleSecondCheckBox(self, check_state):
self.second_bibles = False
        # We have a set value; convert it to True/False.
if check_state == QtCore.Qt.Checked:
self.second_bibles = True
def onVerseSeparatorCheckBoxClicked(self, checked):
if checked:
self.verseSeparatorLineEdit.setFocus()
else:
self.verseSeparatorLineEdit.setText(get_reference_separator(u'sep_v_default'))
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onVerseSeparatorLineEditEdited(self, text):
self.verseSeparatorCheckBox.setChecked(True)
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onVerseSeparatorLineEditFinished(self):
if self.verseSeparatorLineEdit.isModified():
text = self.verseSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_v_default') or not text.replace(u'|', u''):
self.verseSeparatorCheckBox.setChecked(False)
self.verseSeparatorLineEdit.setText(get_reference_separator(u'sep_v_default'))
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
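                # i.e. typing the default back in, or leaving only '|' characters,
                # reverts the field to the greyed-out default and unchecks the box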
def onRangeSeparatorCheckBoxClicked(self, checked):
if checked:
self.rangeSeparatorLineEdit.setFocus()
else:
self.rangeSeparatorLineEdit.setText(get_reference_separator(u'sep_r_default'))
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onRangeSeparatorLineEditEdited(self, text):
self.rangeSeparatorCheckBox.setChecked(True)
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onRangeSeparatorLineEditFinished(self):
if self.rangeSeparatorLineEdit.isModified():
text = self.rangeSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_r_default') or not text.replace(u'|', u''):
self.rangeSeparatorCheckBox.setChecked(False)
self.rangeSeparatorLineEdit.setText(get_reference_separator(u'sep_r_default'))
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
def onListSeparatorCheckBoxClicked(self, checked):
if checked:
self.listSeparatorLineEdit.setFocus()
else:
self.listSeparatorLineEdit.setText(get_reference_separator(u'sep_l_default'))
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onListSeparatorLineEditEdited(self, text):
self.listSeparatorCheckBox.setChecked(True)
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onListSeparatorLineEditFinished(self):
if self.listSeparatorLineEdit.isModified():
text = self.listSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_l_default') or not text.replace(u'|', u''):
self.listSeparatorCheckBox.setChecked(False)
self.listSeparatorLineEdit.setText(get_reference_separator(u'sep_l_default'))
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
def onEndSeparatorCheckBoxClicked(self, checked):
if checked:
self.endSeparatorLineEdit.setFocus()
else:
self.endSeparatorLineEdit.setText(get_reference_separator(u'sep_e_default'))
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onEndSeparatorLineEditEdited(self, text):
self.endSeparatorCheckBox.setChecked(True)
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onEndSeparatorLineEditFinished(self):
if self.endSeparatorLineEdit.isModified():
text = self.endSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_e_default') or not text.replace(u'|', u''):
self.endSeparatorCheckBox.setChecked(False)
self.endSeparatorLineEdit.setText(get_reference_separator(u'sep_e_default'))
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
def load(self):
settings = Settings()
settings.beginGroup(self.settingsSection)
self.show_new_chapters = settings.value(u'display new chapter')
self.display_style = settings.value(u'display brackets')
self.layout_style = settings.value(u'verse layout style')
self.bible_theme = settings.value(u'bible theme')
self.second_bibles = settings.value(u'second bibles')
self.newChaptersCheckBox.setChecked(self.show_new_chapters)
self.displayStyleComboBox.setCurrentIndex(self.display_style)
self.layoutStyleComboBox.setCurrentIndex(self.layout_style)
self.bibleSecondCheckBox.setChecked(self.second_bibles)
verse_separator = settings.value(u'verse separator')
if (verse_separator.strip(u'|') == u'') or (verse_separator == get_reference_separator(u'sep_v_default')):
self.verseSeparatorLineEdit.setText(get_reference_separator(u'sep_v_default'))
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.verseSeparatorCheckBox.setChecked(False)
else:
self.verseSeparatorLineEdit.setText(verse_separator)
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.verseSeparatorCheckBox.setChecked(True)
range_separator = settings.value(u'range separator')
if (range_separator.strip(u'|') == u'') or (range_separator == get_reference_separator(u'sep_r_default')):
self.rangeSeparatorLineEdit.setText(get_reference_separator(u'sep_r_default'))
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.rangeSeparatorCheckBox.setChecked(False)
else:
self.rangeSeparatorLineEdit.setText(range_separator)
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.rangeSeparatorCheckBox.setChecked(True)
list_separator = settings.value(u'list separator')
if (list_separator.strip(u'|') == u'') or (list_separator == get_reference_separator(u'sep_l_default')):
self.listSeparatorLineEdit.setText(get_reference_separator(u'sep_l_default'))
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.listSeparatorCheckBox.setChecked(False)
else:
self.listSeparatorLineEdit.setText(list_separator)
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.listSeparatorCheckBox.setChecked(True)
end_separator = settings.value(u'end separator')
if (end_separator.strip(u'|') == u'') or (end_separator == get_reference_separator(u'sep_e_default')):
self.endSeparatorLineEdit.setText(get_reference_separator(u'sep_e_default'))
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.endSeparatorCheckBox.setChecked(False)
else:
self.endSeparatorLineEdit.setText(end_separator)
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.endSeparatorCheckBox.setChecked(True)
self.language_selection = settings.value(u'book name language')
self.languageSelectionComboBox.setCurrentIndex(self.language_selection)
settings.endGroup()
def save(self):
settings = Settings()
settings.beginGroup(self.settingsSection)
settings.setValue(u'display new chapter', self.show_new_chapters)
settings.setValue(u'display brackets', self.display_style)
settings.setValue(u'verse layout style', self.layout_style)
settings.setValue(u'book name language', self.language_selection)
settings.setValue(u'second bibles', self.second_bibles)
settings.setValue(u'bible theme', self.bible_theme)
if self.verseSeparatorCheckBox.isChecked():
settings.setValue(u'verse separator', self.verseSeparatorLineEdit.text())
else:
settings.remove(u'verse separator')
if self.rangeSeparatorCheckBox.isChecked():
settings.setValue(u'range separator', self.rangeSeparatorLineEdit.text())
else:
settings.remove(u'range separator')
if self.listSeparatorCheckBox.isChecked():
settings.setValue(u'list separator', self.listSeparatorLineEdit.text())
else:
settings.remove(u'list separator')
if self.endSeparatorCheckBox.isChecked():
settings.setValue(u'end separator', self.endSeparatorLineEdit.text())
else:
settings.remove(u'end separator')
update_reference_separators()
Receiver.send_message(u'bibles_load_list')
settings.endGroup()
def updateThemeList(self, theme_list):
"""
Called from ThemeManager when the Themes have changed.
``theme_list``
The list of available themes::
[u'Bible Theme', u'Song Theme']
"""
self.bibleThemeComboBox.clear()
self.bibleThemeComboBox.addItem(u'')
self.bibleThemeComboBox.addItems(theme_list)
find_and_set_in_combo_box(self.bibleThemeComboBox, self.bible_theme)
def getGreyTextPalette(self, greyed):
"""
Returns a QPalette with greyed out text as used for placeholderText.
"""
palette = QtGui.QPalette()
color = self.palette().color(QtGui.QPalette.Active, QtGui.QPalette.Text)
if greyed:
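            # 128/255 alpha renders the text at roughly 50% opacity,
            # mimicking Qt's placeholder-text styling for default values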
color.setAlpha(128)
palette.setColor(QtGui.QPalette.Active, QtGui.QPalette.Text, color)
return palette
|
gpl-2.0
| -5,386,206,835,964,329,000 | 59.229358 | 116 | 0.700419 | false | 4.19154 | false | false | false |
jerome-nexedi/dream
|
dream/simulation/CoreObject.py
|
1
|
47086
|
# ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM. If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================
'''
Created on 12 Jul 2012
@author: George
'''
'''
Class that acts as an abstract class. It should have no instances. All the core objects should inherit from it.
'''
# from SimPy.Simulation import Process, Resource, now, SimEvent, waitevent
import simpy
from ManPyObject import ManPyObject
# ===========================================================================
# the core object
# ===========================================================================
class CoreObject(ManPyObject):
class_name = 'Dream.CoreObject'
def __init__(self, id, name, **kw):
ManPyObject.__init__(self,id,name)
self.objName = name
# lists that hold the previous and next objects in the flow
self.next=[] #list with the next objects in the flow
self.previous=[] #list with the previous objects in the flow
self.nextIds=[] #list with the ids of the next objects in the flow
self.previousIds=[] #list with the ids of the previous objects in the flow
#lists to hold statistics of multiple runs
self.Failure=[]
self.Working=[]
self.Blockage=[]
self.Waiting=[]
self.OffShift=[]
self.WaitingForOperator=[]
self.WaitingForLoadOperator=[]
self.Loading = []
self.SettingUp =[]
# list that holds the objectInterruptions that have this element as victim
self.objectInterruptions=[]
#default attributes set so that the CoreObject has them
self.isPreemptive=False
self.resetOnPreemption=False
self.interruptCause=None
self.gatherWipStat=False
# flag used to signal that the station waits for removeEntity event
self.waitEntityRemoval=False
# attributes/indices used for printing the route, hold the cols corresponding to the object (entities route and operators route)
self.station_col_inds=[]
self.op_col_indx=None
# if there is input in a dictionary parse from it
from Globals import G
G.ObjList.append(self) # add object to ObjList
# list of expected signals of a station (values can be used as flags to inform on which signals is the station currently yielding)
self.expectedSignals={
"isRequested":0,
"canDispose":0,
"interruptionStart":0,
"interruptionEnd":0,
"loadOperatorAvailable":0,
"initialWIP":0,
"brokerIsSet":0,
"preemptQueue":0,
"entityRemoved":0,
"entityCreated":0,
"moveEnd":0,
"processOperatorUnavailable":0
}
        # flag notifying that the station can deliver entities that ended their processing while interrupted
self.canDeliverOnInterruption=False
# keep wip stats for every replication
self.WipStat=[]
def initialize(self):
from Globals import G
self.env=G.env
self.Up=True #Boolean that shows if the object is in failure ("Down") or not ("up")
self.onShift=True
self.currentEntity=None
# ============================== total times ===============================================
self.totalOperationTime=0 #dummy variable to hold totalWorkin/SetupTime during an interruption (yield ...(self.operation('setup'))
self.totalBlockageTime=0 #holds the total blockage time
self.totalFailureTime=0 #holds the total failure time
self.totalWaitingTime=0 #holds the total waiting time
self.totalWorkingTime=0 #holds the total working time
self.totalOffShiftTime=0 #holds the total off-shift time
self.completedJobs=0 #holds the number of completed jobs
# ============================== Entity related attributes =================================
self.timeLastEntityEnded=0 #holds the last time that an entity ended processing in the object
self.nameLastEntityEnded="" #holds the name of the last entity that ended processing in the object
self.timeLastEntityEntered=0 #holds the last time that an entity entered in the object
self.nameLastEntityEntered="" #holds the name of the last entity that entered in the object
# ============================== shift related times =====================================
self.timeLastShiftStarted=0 #holds the time that the last shift of the object started
self.timeLastShiftEnded=0 #holds the time that the last shift of the object ended
self.offShiftTimeTryingToReleaseCurrentEntity=0 #holds the time that the object was off-shift while trying
#to release the current entity
# ============================== failure related times =====================================
self.timeLastFailure=0 #holds the time that the last failure of the object started
self.timeLastFailureEnded=0 #holds the time that the last failure of the object ended
#processing the current entity
self.downTimeInTryingToReleaseCurrentEntity=0 #holds the time that the object was down while trying
#to release the current entity . This might be due to failure, off-shift, etc
self.timeLastEntityLeft=0 #holds the last time that an entity left the object
self.processingTimeOfCurrentEntity=0 #holds the total processing time that the current entity required
# ============================== waiting flag ==============================================
self.waitToDispose=False #shows if the object waits to dispose an entity
self.isWorkingOnTheLast=False #shows if the object is performing the last processing before scheduled interruption
# ============================== the below are currently used in Jobshop =======================
self.giver=None #the CoreObject that the activeObject will take an Entity from
if len(self.previous)>0:
self.giver=self.previous[0]
self.receiver=None #the CoreObject that the activeObject will give an Entity to
if len(self.next)>0:
self.receiver=self.next[0]
# ============================== variable that is used for the loading of objects =============
self.exitAssignedToReceiver = None # by default the objects are not blocked
# when the entities have to be loaded to operated objects
# then the giverObjects have to be blocked for the time
# that the object is being loaded
# ============================== variable that is used signalling of objects ==================
self.entryAssignedToGiver = None # by default the objects are not blocked
# when the entities have to be received by objects
# then the objects have to be blocked after the first signal they receive
# in order to avoid signalling the same object
# while it has not received the entity it has been originally signalled for
# ============================== lists to hold statistics of multiple runs =====================
self.totalTimeWaitingForOperator=0
self.operatorWaitTimeCurrentEntity=0
self.totalTimeInCurrentEntity=0
self.operatorWaitTimeCurrentEntity=0
self.totalProcessingTimeInCurrentEntity=0
# self.failureTimeInCurrentEntity=0
self.setupTimeCurrentEntity=0
# the time that the object started/ended its wait for the operator
self.timeWaitForOperatorStarted=0
self.timeWaitForOperatorEnded=0
# the time that the object started/ended its wait for the operator
self.timeWaitForLoadOperatorStarted=0
self.timeWaitForLoadOperatorEnded=0
self.totalTimeWaitingForLoadOperator=0
# the time that the operator started/ended loading the object
self.timeLoadStarted=0
self.timeLoadEnded=0
self.totalLoadTime=0
# the time that the operator started/ended setting-up the object
self.timeSetupStarted=0
self.timeSetupEnded=0
self.totalSetupTime=0
# Current entity load/setup/loadOperatorwait/operatorWait related times
self.operatorWaitTimeCurrentEntity=0 # holds the time that the object was waiting for the operator
self.loadOperatorWaitTimeCurrentEntity = 0 # holds the time that the object waits for operator to load the it
self.loadTimeCurrentEntity = 0 # holds the time to load the current entity
self.setupTimeCurrentEntity = 0 # holds the time to setup the object before processing the current entity
self.shouldPreempt=False #flag that shows that the object should preempt or not
self.isProcessingInitialWIP=False #flag that is used only when a object has initial wip
self.lastGiver=None # variable that holds the last giver of the object, used by object in case of preemption
# initialize the wipStatList -
# TODO, think what to do in multiple runs
# TODO, this should be also updated in Globals.setWIP (in case we have initial wip)
import numpy as np
self.wipStatList=np.array([[0,0]])
self.isRequested=self.env.event()
self.canDispose=self.env.event()
self.interruptionEnd=self.env.event()
self.interruptionStart=self.env.event()
self.interruptedBy=None
self.entityRemoved=self.env.event()
self.initialWIP=self.env.event()
# flag used to signal that the station waits for removeEntity event
self.waitEntityRemoval=False
# attributes/indices used for printing the route, hold the cols corresponding to the object (entities route and operators route)
self.station_col_inds=[]
self.op_col_indx=None
# flag that locks the entry of an object so that it cannot receive entities
self.isLocked=False
# flag that shows if the object is processing state at any given time
self.isProcessing=False
# variable that shows what kind of operation is the station performing at the moment
'''
it can be Processing or Setup
XXX: others not yet implemented
'''
self.currentlyPerforming=None
# flag that shows if the object is blocked state at any given time
self.isBlocked=False
self.timeLastBlockageStarted=None
# list of expected signals of a station (values can be used as flags to inform on which signals is the station currently yielding)
self.expectedSignals={
"isRequested":0,
"canDispose":0,
"interruptionStart":0,
"interruptionEnd":0,
"loadOperatorAvailable":0,
"initialWIP":0,
"brokerIsSet":0,
"preemptQueue":0,
"entityRemoved":0,
"entityCreated":0,
"moveEnd":0
}
# lists that keep the start/endShiftTimes of the victim
self.endShiftTimes=[]
self.startShiftTimes=[]
# =======================================================================
# the main process of the core object
# this is dummy, every object must have its own implementation
# =======================================================================
def run(self):
raise NotImplementedError("Subclass must define 'run' method")
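    # a minimal sketch of a subclass implementation under this SimPy design
    # (illustrative only; real stations also handle interruptions, operators, etc.):
    #   class DummyStation(CoreObject):
    #       def run(self):
    #           while 1:
    #               yield self.isRequested             # wait to be signalled by a giver
    #               self.isRequested=self.env.event()  # re-arm the event
    #               self.getEntity()
    #               yield self.env.timeout(self.calculateProcessingTime())
    #               self.signalReceiver()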
# =======================================================================
# sets the routing in and out elements for the Object
# =======================================================================
def defineRouting(self, predecessorList=[], successorList=[]):
self.next=successorList
self.previous=predecessorList
# =======================================================================
    # checks if there is anything set as WIP at the beginning of the simulation
# and sends an event to initialize the simulation
# =======================================================================
def initialSignalReceiver(self):
if self.haveToDispose():
self.signalReceiver()
def initialAllocationRequest(self):
# TODO if the station is operated, and the operators have skills defined then the SkilledOperatorRouter should be signalled
# XXX: there may be a case where one object is not assigned an operator, in that case we do not want to invoke the allocation routine
if self.checkForDedicatedOperators():
allocationNeeded=False
from Globals import G
for obj in G.MachineList:
if obj.operatorPool!='None':
if obj.operatorPool.operators:
allocationNeeded=False
break
else:
allocationNeeded=True
if allocationNeeded:
self.requestAllocation()
# =======================================================================
# removes an Entity from the Object the Entity to be removed is passed
# as argument by getEntity of the receiver
# =======================================================================
def removeEntity(self, entity=None, resetFlags=True, addBlockage=True):
if addBlockage and self.isBlocked:
# add the blocking time
self.addBlockage()
# reset flags
if resetFlags:
self.isBlocked=False
self.isProcessing=False
activeObjectQueue=self.Res.users
activeObjectQueue.remove(entity) #remove the Entity from the queue
if self.receiver:
self.receiver.appendEntity(entity)
self.downTimeInTryingToReleaseCurrentEntity=0
self.offShiftTimeTryingToReleaseCurrentEntity=0
self.timeLastEntityLeft=self.env.now
self.outputTrace(entity.name, "released "+self.objName)
#append the time to schedule so that it can be read in the result
        #remember that every entity has its schedule which is supposed to be updated every time
        # the entity enters a new object
if entity.schedule:
entity.schedule[-1]["exitTime"] = self.env.now
# update wipStatList
if self.gatherWipStat:
import numpy
self.wipStatList=numpy.concatenate((self.wipStatList,[[self.env.now, len(activeObjectQueue)]]))
if self.expectedSignals['entityRemoved']:
self.printTrace(self.id, signal='(removedEntity)')
self.sendSignal(receiver=self, signal=self.entityRemoved)
return entity
#===========================================================================
# appends entity to the receiver object. to be called by the removeEntity of the giver
# this method is created to be overridden by the Assembly class in its getEntity where Frames are loaded
#===========================================================================
def appendEntity(self,entity=None):
activeObjectQueue=self.Res.users
activeObjectQueue.append(entity)
# =======================================================================
# called be getEntity it identifies the Entity
# to be obtained so that
# getEntity gives it to removeEntity as argument
# =======================================================================
def identifyEntityToGet(self):
giverObjectQueue=self.getGiverObjectQueue()
return giverObjectQueue[0]
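        # note: the default policy is FIFO on the giver queue (already sorted by
        # the giver's scheduling rule); subclasses may override this selection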
# =======================================================================
# adds the blockage time to totalBlockageTime
# each time an Entity is removed
# =======================================================================
def addBlockage(self):
if self.timeLastBlockageStarted:
self.totalBlockageTime+=self.env.now-self.timeLastBlockageStarted
# =======================================================================
# gets an entity from the giver
# =======================================================================
def getEntity(self):
# get active object and its queue, as well as the active (to be) entity
#(after the sorting of the entities in the queue of the giver object)
# activeObject=self.getActiveObject()
activeObjectQueue=self.Res.users
# get giver object, its queue, and sort the entities according to this object priorities
giverObject=self.giver
giverObject.sortEntities() #sort the Entities of the giver
#according to the scheduling rule if applied
giverObject.sortEntitiesForReceiver(self)
giverObjectQueue=giverObject.Res.users
# if the giverObject is blocked then unBlock it
if giverObject.exitIsAssignedTo():
giverObject.unAssignExit()
# if the activeObject entry is blocked then unBlock it
if self.entryIsAssignedTo():
self.unAssignEntry()
activeEntity=self.identifyEntityToGet()
activeEntity.currentStation=self
# update the receiver of the giverObject
giverObject.receiver=self
# remove entity from the giver
activeEntity = giverObject.removeEntity(entity=self.identifyEntityToGet())
# variable that holds the last giver; used in case of preemption
self.lastGiver=self.giver
# #get the entity from the previous object and put it in front of the activeQ
# activeObjectQueue.append(activeEntity)
#append the time to schedule so that it can be read in the result
        #remember that every entity has its schedule which is supposed to be updated every time
# the entity enters a new object
activeEntity.schedule.append({"station": self,
"entranceTime": self.env.now})
#update variables
activeEntity.currentStation=self
self.timeLastEntityEntered=self.env.now
self.nameLastEntityEntered=activeEntity.name # this holds the name of the last entity that got into object
# update the next list of the object
self.updateNext(activeEntity)
self.outputTrace(activeEntity.name, "got into "+self.objName)
self.printTrace(activeEntity.name, enter=self.id)
# # if there are entities with requiredParts then check whether the requirements are fulfilled for them to proceed
# # ass soon as a "buffer" receives an entity it controls if the entity is requested elsewhere,
# # then it checks if there other requested entities by the same requesting entity.
# # Finally, it is controlled whether all the requested parts have concluded
# # their sequences for the requesting entity
# from Globals import G
# # for all the entities in the EntityList
# for entity in G.EntityList:
# requiredParts=entity.getRequiredParts()
# if requiredParts:
        # # if the activeEntity is in the requiredParts of the entity
# if activeEntity in requiredParts:
# # if the entity that requires the activeEntity can proceed then signal the currentStation of the entity
# if entity.checkIfRequiredPartsReady() and entity.currentStation.expectedSignals['canDispose']:
# entity.mayProceed=True
# self.sendSignal(receiver=entity.currentStation, signal=entity.currentStation.canDispose)
# if the object (eg Queue) canAccept then signal the Giver
if self.canAccept():
self.signalGiver()
return activeEntity
#===========================================================================
# updates the next list of the object
#===========================================================================
def updateNext(self, entity=None):
pass
#===========================================================================
# check whether there is a critical entity to be disposed
# and if preemption is required
#===========================================================================
def preemptReceiver(self):
activeObjectQueue=self.Res.users
# find a critical order if any
critical=False
for entity in activeObjectQueue:
if entity.isCritical:
activeEntity=entity
critical=True
break
if critical:
# pick a receiver
receiver=None
if any(object for object in self.next if object.isPreemptive and object.checkIfActive()):
receiver=next(object for object in self.next if object.isPreemptive and object.checkIfActive())
# if there is any receiver that can be preempted check if it is operated
if receiver:
receiverOperated=False # local variable to inform if the receiver is operated for Loading
try:
from MachineJobShop import MachineJobShop
from MachineManagedJob import MachineManagedJob
# TODO: implement preemption for simple machines
if receiver.operatorPool\
and isinstance(receiver, MachineJobShop) or\
isinstance(receiver, MachineManagedJob):
# and the operationType list contains Load, the receiver is operated
if (receiver.operatorPool!="None")\
and any(type=="Load" for type in receiver.multOperationTypeList):
receiverOperated=True
except:
pass
# if the obtained Entity is critical and the receiver is preemptive and not operated
# in the case that the receiver is operated the preemption is performed by the operators
# if the receiver is not Up then no preemption will be performed
if not receiverOperated and len(receiver.Res.users)>0:
#if the receiver does not hold an Entity that is also critical
if not receiver.Res.users[0].isCritical:
receiver.shouldPreempt=True
self.printTrace(self.id, preempt=receiver.id)
receiver.preempt()
                    receiver.timeLastEntityEnded=self.env.now #required to count blockage correctly in the preempted station
# sort so that the critical entity is placed in front
activeObjectQueue.sort(key=lambda x: x==activeEntity, reverse=True)
# if there is a critical entity and the possible receivers are operated then signal the Router
elif receiverOperated:
self.signalRouter(receiver)
activeObjectQueue.sort(key=lambda x: x==activeEntity, reverse=True)
# update wipStatList
if self.gatherWipStat:
import numpy
self.wipStatList=numpy.concatenate((self.wipStatList,[[self.env.now, len(activeObjectQueue)]]))
#===========================================================================
# find possible receivers
#===========================================================================
@staticmethod
def findReceiversFor(activeObject):
receivers=[]
for object in [x for x in activeObject.next if x.canAccept(activeObject) and not x.isRequested.triggered and x.expectedSignals['isRequested']]:
receivers.append(object)
return receivers
# =======================================================================
# signal the successor that the object can dispose an entity
# =======================================================================
def signalReceiver(self):
possibleReceivers=self.findReceiversFor(self)
if possibleReceivers:
receiver=self.selectReceiver(possibleReceivers)
receiversGiver=self
# perform the checks that canAcceptAndIsRequested used to perform and update activeCallersList or assignExit and operatorPool
while not receiver.canAcceptAndIsRequested(receiversGiver):
possibleReceivers.remove(receiver)
if not possibleReceivers:
receiversGiver=None
receiver=None
                    # if no receiver can accept then try to preempt a receiver if the station holds a critical order
self.preemptReceiver()
return False
receiver=self.selectReceiver(possibleReceivers)
receiversGiver=self
# sorting the entities of the object for the receiver
self.sortEntitiesForReceiver(receiver)
# signalling the Router if the receiver is operated and not assigned an operator
if self.signalRouter(receiver):
return False
self.receiver=receiver
self.receiver.giver=self
self.printTrace(self.id, signalReceiver=self.receiver.id)
# assign the entry of the receiver
self.receiver.assignEntryTo()
# assign the exit of the current object to the receiver
self.assignExitTo(self.receiver)
if self.receiver.expectedSignals['isRequested']:
self.sendSignal(receiver=self.receiver, signal=self.receiver.isRequested)
return True
        # if no receiver can accept then try to preempt a receiver if the station holds a critical order
self.preemptReceiver()
return False
# =======================================================================
# select a receiver Object
# =======================================================================
@staticmethod
def selectReceiver(possibleReceivers=[]):
candidates=possibleReceivers
# dummy variables that help prioritize the objects requesting to give objects to the object (activeObject)
maxTimeWaiting=0 # dummy variable counting the time a successor is waiting
receiver=None
from Globals import G
for object in candidates:
timeWaiting=G.env.now-object.timeLastEntityLeft # the time it has been waiting is updated and stored in dummy variable timeWaiting
if(timeWaiting>maxTimeWaiting or maxTimeWaiting==0):# if the timeWaiting is the maximum among the ones of the successors
maxTimeWaiting=timeWaiting
receiver=object # set the receiver as the longest waiting possible receiver
return receiver
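    # illustrative example of the policy above: with candidates A (timeLastEntityLeft=3)
    # and B (timeLastEntityLeft=7) at env.now=10, A has been waiting for 7 time units
    # and B for 3, so A is selected as the receiver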
#===========================================================================
# sort the entities of the queue for the receiver
#===========================================================================
def sortEntitiesForReceiver(self, receiver=None):
pass
#===========================================================================
# find possible givers
#===========================================================================
@staticmethod
def findGiversFor(activeObject):
givers=[]
for object in [x for x in activeObject.previous if(not x is activeObject) and not x.canDispose.triggered and
(x.expectedSignals['canDispose'] or
                        (x.canDeliverOnInterruption and x.timeLastShiftEnded==x.env.now))]: # extra check. If shift ended right now and the object
                                                                                            # can unload, we relax the canDispose flag
if object.haveToDispose(activeObject):
givers.append(object)
return givers
# =======================================================================
# signal the giver that the entity is removed from its internalQueue
# =======================================================================
def signalGiver(self):
possibleGivers=self.findGiversFor(self)
if possibleGivers:
giver=self.selectGiver(possibleGivers)
giversReceiver=self
# perform the checks that canAcceptAndIsRequested used to perform and update activeCallersList or assignExit and operatorPool
while not self.canAcceptAndIsRequested(giver):
possibleGivers.remove(giver)
if not possibleGivers:
return False
giver=self.selectGiver(possibleGivers)
giversReceiver=self
self.giver=giver
self.giver.receiver=self
if self.giver.expectedSignals['canDispose'] or (self.giver.canDeliverOnInterruption
                                                        and self.giver.timeLastShiftEnded==self.env.now): # extra check. If shift ended right now and the object
                                                        # can unload, we relax the canDispose flag
self.sendSignal(receiver=self.giver, signal=self.giver.canDispose)
self.printTrace(self.id, signalGiver=self.giver.id)
return True
return False
# =======================================================================
# select a giver Object
# =======================================================================
@staticmethod
def selectGiver(possibleGivers=[]):
candidates=possibleGivers
# dummy variables that help prioritize the objects requesting to give objects to the object (activeObject)
maxTimeWaiting=0 # dummy variable counting the time a predecessor is blocked
giver=None
from Globals import G
# loop through the possible givers to see which have to dispose and which is the one blocked for longer
for object in candidates:
# calculate how much the giver is waiting
timeWaiting=G.env.now-object.timeLastEntityEnded
if(timeWaiting>=maxTimeWaiting):
giver=object # the object to deliver the Entity to the activeObject is set to the ith member of the previous list
maxTimeWaiting=timeWaiting
return giver
# =======================================================================
# actions to be taken after the simulation ends
# =======================================================================
def postProcessing(self, MaxSimtime=None):
        from Globals import G  # imported unconditionally: G.ExitList is also used further down
        if MaxSimtime==None:
            MaxSimtime=G.maxSimTime
activeObject=self.getActiveObject()
activeObjectQueue=self.getActiveObjectQueue()
import numpy
self.wipStatList=numpy.concatenate((self.wipStatList,[[self.env.now, len(activeObjectQueue)]]))
#calculate the offShift time for current entity
offShiftTimeInCurrentEntity=0
if self.interruptedBy:
if self.onShift==False: # and self.interruptedBy=='ShiftScheduler':
offShiftTimeInCurrentEntity=self.env.now-activeObject.timeLastShiftEnded
if self.isBlocked:
self.addBlockage()
#if object is currently processing an entity we should count this working time
if self.isProcessing:
'''XXX currentlyPerforming can be Setup or Processing '''
if self.currentlyPerforming:
if self.currentlyPerforming=='Setup':
activeObject.totalSetupTime+=self.env.now-self.timeLastOperationStarted
else:
activeObject.totalWorkingTime+=self.env.now-self.timeLastOperationStarted
else:
activeObject.totalWorkingTime+=self.env.now-self.timeLastProcessingStarted
# activeObject.totalTimeWaitingForOperator+=activeObject.operatorWaitTimeCurrentEntity
# if object is down we have to add this failure time to its total failure time
if self.Up==False:
if self.onShift:
activeObject.totalFailureTime+=self.env.now-activeObject.timeLastFailure
# if object is off shift add only the fail time before the shift ended
if not self.onShift and self.timeLastFailure < self.timeLastShiftEnded:
self.totalFailureTime+=self.timeLastShiftEnded-self.timeLastFailure
#if the object is off shift,add this to the off-shift time
if activeObject.onShift==False:
# if we ran the simulation for infinite time we have to identify the last event
now=self.env.now
if now==float('inf'):
now=0
lastExits=[]
for object in G.ExitList:
lastExits.append(object.timeLastEntityEntered)
if lastExits:
now=max(lastExits)
self.totalOffShiftTime+=now-self.timeLastShiftEnded
#object was idle when it was not in any other state
activeObject.totalWaitingTime=MaxSimtime-activeObject.totalWorkingTime-activeObject.totalBlockageTime-activeObject.totalFailureTime-activeObject.totalLoadTime-activeObject.totalSetupTime-self.totalOffShiftTime
        if activeObject.totalBlockageTime<0 and activeObject.totalBlockageTime>-0.00001: #to avoid slightly negative values caused by rounding precision
            self.totalBlockageTime=0
        if activeObject.totalWaitingTime<0 and activeObject.totalWaitingTime>-0.00001: #to avoid slightly negative values caused by rounding precision
            self.totalWaitingTime=0
activeObject.Failure.append(100*self.totalFailureTime/MaxSimtime)
activeObject.Blockage.append(100*self.totalBlockageTime/MaxSimtime)
activeObject.Waiting.append(100*self.totalWaitingTime/MaxSimtime)
activeObject.Working.append(100*self.totalWorkingTime/MaxSimtime)
activeObject.WaitingForOperator.append(100*self.totalTimeWaitingForOperator/MaxSimtime)
activeObject.WaitingForLoadOperator.append(100*self.totalTimeWaitingForLoadOperator/MaxSimtime)
activeObject.Loading.append(100*self.totalLoadTime/MaxSimtime)
activeObject.SettingUp.append(100*self.totalSetupTime/MaxSimtime)
activeObject.OffShift.append(100*self.totalOffShiftTime/MaxSimtime)
activeObject.WipStat.append(self.wipStatList.tolist())
# =======================================================================
# outputs results to JSON File
# =======================================================================
def outputResultsJSON(self):
pass
# =======================================================================
# checks if the Object can dispose an entity to the following object
# =======================================================================
def haveToDispose(self, callerObject=None):
activeObjectQueue=self.Res.users
return len(activeObjectQueue)>0
# =======================================================================
# checks if the Object can accept an entity and there is an entity
# in some possible giver waiting for it
# =======================================================================
def canAcceptAndIsRequested(self,callerObject=None):
pass
# =======================================================================
# checks if the Object can accept an entity
# =======================================================================
def canAccept(self, callerObject=None):
pass
#===========================================================================
# method used to check whether the station is a successor of the caller
#===========================================================================
def isInRouteOf(self, callerObject=None):
thecaller=callerObject
# if the caller is not defined then return True. We are only interested in checking whether
# the station can accept whatever entity from whichever giver
if not thecaller:
return True
#check it the caller object is predecessor to the activeObject
if thecaller in self.previous:
return True
return False
# =======================================================================
# sorts the Entities in the activeQ of the objects
# =======================================================================
def sortEntities(self):
pass
# =======================================================================
# get the active object. This always returns self
# =======================================================================
def getActiveObject(self):
return self
# =======================================================================
# get the activeQ of the active object.
# =======================================================================
def getActiveObjectQueue(self):
return self.Res.users
# =======================================================================
# get the giver object in a getEntity transaction.
# =======================================================================
def getGiverObject(self):
return self.giver
# =======================================================================
# get the giver object queue in a getEntity transaction.
# =======================================================================
def getGiverObjectQueue(self):
return self.giver.Res.users
# =======================================================================
# get the receiver object in a removeEntity transaction.
# =======================================================================
def getReceiverObject(self):
return self.receiver
# =======================================================================
# get the receiver object queue in a removeEntity transaction.
# =======================================================================
def getReceiverObjectQueue(self):
return self.receiver.Res.users
# =======================================================================
# calculates the processing time
# =======================================================================
def calculateProcessingTime(self):
# this is only for processing of the initial wip
if self.isProcessingInitialWIP:
activeEntity=self.getActiveObjectQueue()[0]
if activeEntity.remainingProcessingTime:
remainingProcessingTime=activeEntity.remainingProcessingTime
from RandomNumberGenerator import RandomNumberGenerator
initialWIPrng=RandomNumberGenerator(self, remainingProcessingTime)
return initialWIPrng.generateNumber()
return self.rng.generateNumber() # this is if we have a default processing time for all the entities
#===========================================================================
# calculates time (running through a dictionary) according to the type of processing given as argument
#===========================================================================
def calculateTime(self,type='Processing'):
return {
'Load': self.loadRng.generateNumber,
'Setup': self.stpRng.generateNumber,
'Processing': self.calculateProcessingTime
}[type]()
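    # Illustrative dispatch (hypothetical calls, not part of the original code):
    #   station.calculateTime(type='Setup')   # -> self.stpRng.generateNumber()
    #   station.calculateTime()               # -> self.calculateProcessingTime()
    # The dict above maps each time type to its generator and immediately calls it.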
# =======================================================================
    # checks whether the exit of the object is assigned to a receiver (i.e. blocked)
# =======================================================================
def exitIsAssignedTo(self):
return self.exitAssignedToReceiver
# =======================================================================
# assign Exit of the object
# =======================================================================
def assignExitTo(self, callerObject=None):
self.exitAssignedToReceiver=callerObject
# =======================================================================
# unblock the object
# =======================================================================
def unAssignExit(self):
self.exitAssignedToReceiver = None
# =======================================================================
# checks if the object is blocked
# =======================================================================
def entryIsAssignedTo(self):
return self.entryAssignedToGiver
# =======================================================================
# assign Exit of the object
# =======================================================================
def assignEntryTo(self):
self.entryAssignedToGiver = self.giver
# =======================================================================
# unblock the object
# =======================================================================
def unAssignEntry(self):
self.entryAssignedToGiver = None
# =======================================================================
# actions to be carried whenever the object is interrupted
# (failure, break, preemption, etc)
# =======================================================================
def interruptionActions(self):
pass
# =======================================================================
# actions to be carried whenever the object recovers
# control after an interruption (failure, break, preemption, etc)
# =======================================================================
def postInterruptionActions(self):
pass
# =======================================================================
# method to execute preemption
# =======================================================================
def preempt(self):
        # TODO: make a generic method
pass
# =======================================================================
# checks if the object is in an active position
# =======================================================================
def checkIfActive(self):
return self.Up and self.onShift
#===========================================================================
# filter that returns True if the activeObject Queue is empty and
# false if object holds entities in its queue
#===========================================================================
def activeQueueIsEmpty(self):
return len(self.Res.users)==0
# =======================================================================
# actions to be carried out when the processing of an Entity ends
# =======================================================================
def endOperationActions(self):
pass
#===========================================================================
# check if an entity is in the internal Queue of the object
#===========================================================================
def isInActiveQueue(self, entity=None):
activeObjectQueue = self.Res.users
return any(x==entity for x in activeObjectQueue)
|
gpl-3.0
| -88,395,979,137,123,230 | 53.687573 | 217 | 0.514102 | false | 5.539529 | false | false | false |
winhamwr/neckbeard
|
neckbeard/cloud_resource.py
|
1
|
19469
|
import logging
import time
import boto.exception
import dateutil.parser
import requests
from boto.ec2 import elb
from requests.exceptions import (
ConnectionError,
Timeout,
RequestException,
)
from simpledb import models
from neckbeard.output import fab_out_opts
NODE_AWS_TYPES = ['ec2', 'rds', 'elb']
EC2_RETIRED_STATES = ['shutting-down', 'terminated']
RDS_RETIRED_STATES = ['deleted']
logger = logging.getLogger('cloud_resource')
fab_output_hides = fab_out_opts[logger.getEffectiveLevel()]
fab_quiet = fab_output_hides + ['stderr']
# This is just a non-functional place to track configuration options to provide
# a starting point once we add actual validation
REQUIRED_CONFIGURATION = {
'ec2': [
'aws.keypair',
],
}
OPTIONAL_CONFIGURATION = {
'ec2': [
'aws.elastic_ip',
],
}
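# A minimal sketch of how these tables could be consumed once validation is
# implemented (assumption -- validate_node_config is hypothetical and not part
# of this module):
#
#   def validate_node_config(aws_type, config):
#       for dotted in REQUIRED_CONFIGURATION.get(aws_type, []):
#           section, key = dotted.split('.')
#           if key not in config.get(section, {}):
#               raise ValueError("missing required option: %s" % dotted)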
class InfrastructureNode(models.Model):
nodename = models.ItemName()
generation_id = models.NumberField(required=True)
# The environment name. Eg. test, beta, staging, live
deployment_name = models.Field(required=True)
# Type of node. Eg. ec2, rds, elb
aws_type = models.Field(required=True)
# Unique AWS id. Eg. `i-xxxxxx`
aws_id = models.Field(required=True)
# Unique ID within this generation of a deployment
# This determine which configuration is pulled
name = models.Field(required=True)
creation_date = models.DateTimeField(required=True)
is_running = models.NumberField(default=1, required=True)
# Is this generation the currently-active generation
is_active_generation = models.NumberField(default=0, required=True)
# Whether or not we've completed the first deploy on this node
# Used to allow the first deploy to differ from subsequent deploys
# for one-time operations per node. Idempotency is preferred, but this is a
# shortcut towards some speed improvements. We only need to do EBS volume
# mounting on the first run, for example.
initial_deploy_complete = models.NumberField(default=0, required=True)
def __init__(self, *args, **kwargs):
self.ec2conn = None
self.rdsconn = None
self.elbconn = None
self._boto_instance = None
self._deployment_info = None
super(InfrastructureNode, self).__init__(*args, **kwargs)
def __str__(self):
if self.aws_type in NODE_AWS_TYPES:
output_str = '%s:%s[%s]<%s>' % (
self.aws_type,
self.name,
self.aws_id,
self.creation_date,
)
return output_str
return super(InfrastructureNode, self).__str__()
def save(self):
# Until this is well-tested, I don't want anyone running this code and
# actually writing to a SimpleDB Domain. This is a "permanent mock"
# until we think this functionality is safe/stable
logger.critical("Called save on %s", self)
return
def get_status_output(self):
"""
Provide a detailed string representation of the instance with its
current operational/health status.
"""
if self.aws_type in NODE_AWS_TYPES:
status_str = ''
if not self.is_running:
status_str += 'RETIRED-'
else:
if self.is_operational:
status_str += 'UP-'
else:
status_str += 'INACTIVE-'
if not self.is_healthy:
status_str += 'UNHEALTHY-'
return "%s-%s" % (status_str, self)
return "UNKNOWN-%s" % self
def set_aws_conns(self, ec2conn, rdsconn):
self.ec2conn = ec2conn
self.rdsconn = rdsconn
def set_deployment_info(self, deployment_info):
self._deployment_info = deployment_info
def is_actually_running(self):
"""
Checks AWS to ensure this node hasn't been terminated.
"""
if self.aws_type == 'ec2':
if self.boto_instance:
if self.boto_instance.state not in EC2_RETIRED_STATES:
return True
elif self.aws_type == 'rds':
if self.boto_instance:
if self.boto_instance.status not in RDS_RETIRED_STATES:
return True
return False
def terminate(self):
if (self.is_active_generation and self.is_operational):
raise Exception("Can't hard-terminate an active, operational node")
if self.aws_type == 'ec2':
if self.is_actually_running():
self.boto_instance.terminate()
elif self.aws_type == 'rds':
if self.is_actually_running():
final_snapshot = self._deployment_info.get(
'final_snapshot',
None,
)
if final_snapshot:
self.boto_instance.stop(
skip_final_snapshot=False,
final_snapshot_id=final_snapshot,
)
else:
self.boto_instance.stop(
skip_final_snapshot=True, final_snapshot_id=None)
self.is_running = 0
self.save()
def retire(self):
"""
Mark this node as retired and no longer used. Useful for hung nodes.
"""
if (self.is_active_generation and self.is_operational):
raise Exception("Can't retire an active, operational node")
self.is_running = 0
self.save()
def make_temporarily_inoperative(self):
"""
        Make the given node temporarily inoperative in preparation for putting
it back in to operation shortly after.
This is the call to use for things like rotating in and out of the
loadbalancer. ``make_fully_inoperative`` should be used for planned
long-term inoperability.
"""
if self.aws_type == 'ec2':
self._remove_from_loadbalancer()
elif self.aws_type == 'rds':
pass
def _remove_from_loadbalancer(self):
"""
If this node is in a loadbalancer, remove it from that loadbalancer.
"""
if self.aws_type != 'ec2':
return
loadbalancer = self.get_loadbalancer()
if not loadbalancer:
return
# Check if this instance is even in the load balancer
if not self._instance_in_load_balancer():
logger.debug(
"_remove_from_loadbalancer: Instance %s not in loadbalancer",
self.boto_instance,
)
return
logger.info(
"Removing node from loadbalancer: %s",
loadbalancer,
)
loadbalancer.deregister_instances([self.aws_id])
def make_fully_inoperative(self):
"""
Make the given node fully inoperative. This is the call to use for
planned long-term inoperability. ``make_temporarily_inoperative``
is more useful for temporary inoperability (such as rotating in
and out of the loadbalancer).
"""
if self.aws_type == 'ec2':
elastic_ip = self.get_elastic_ip()
if elastic_ip and elastic_ip.instance_id:
if elastic_ip.instance_id == self.boto_instance.id:
logger.info(
"Dissociating elastic IP %s from instance %s",
elastic_ip,
elastic_ip.instance_id,
)
self.ec2conn.disassociate_address(elastic_ip.public_ip)
self._remove_from_loadbalancer()
elif self.aws_type == 'rds':
pass
def refresh_boto_instance(self):
self._boto_instance = None
@property
def boto_instance(self):
if not self._boto_instance:
if self.aws_type == 'ec2':
reservations = self.ec2conn.get_all_instances(
instance_ids=[self.aws_id])
if len(reservations) == 1:
self._boto_instance = reservations[0].instances[0]
elif self.aws_type == 'rds':
try:
db_instances = self.rdsconn.get_all_dbinstances(
instance_id=self.aws_id)
except boto.exception.BotoServerError:
return self._boto_instance
if len(db_instances) == 1:
self._boto_instance = db_instances[0]
return self._boto_instance
@property
def launch_time(self):
if not self.boto_instance:
return None
if self.aws_type == 'ec2':
return dateutil.parser.parse(self.boto_instance.launch_time)
elif self.aws_type == 'rds':
return dateutil.parser.parse(self.boto_instance.create_time)
def _instance_in_load_balancer(self):
"""
Determine if this instance is in its current loadbalancer.
"""
loadbalancer = self.get_loadbalancer()
if self.boto_instance is None:
return False
if loadbalancer is None:
return False
        # Comparison between boto instance objects does not necessarily work;
        # compare by id instead.
ids_in_lb = [i.id for i in loadbalancer.instances]
return self.boto_instance.id in ids_in_lb
@property
def is_operational(self):
"""
Is this instance fully operational as defined by the deployment info.
ie. is it in the loadbalancer with the correct ip or is it active with
no pending rds config values
"""
if not self.boto_instance:
return False
if not self._deployment_info:
logger.critical(
"No deployment configuration found for node: %s",
self,
)
logger.critical(
"Unable to determine operational status. "
"Assuming NOT operational."
)
return False
if self.aws_type == 'ec2':
key_name = self._deployment_info['aws']['keypair']
elastic_ip = self.get_elastic_ip()
loadbalancer = self.get_loadbalancer()
if self.boto_instance.state != 'running':
logger.debug(
"is_operational: Instance %s not running",
self.boto_instance,
)
return False
if self.boto_instance.key_name != key_name:
logger.debug(
"is_operational: Instance %s has wrong key",
self.boto_instance,
)
return False
if elastic_ip:
if self.boto_instance.id != elastic_ip.instance_id:
logger.debug(
"is_operational: Instance %s has wrong elastic ip",
self.boto_instance,
)
return False
if loadbalancer:
if not self._instance_in_load_balancer():
logger.debug(
"is_operational: Instance %s not in loadbalancer",
self.boto_instance,
)
logger.debug(
'Instances in loadbalancer: %s',
loadbalancer.instances,
)
return False
health_list = loadbalancer.get_instance_health(
instances=[self.aws_id])
assert len(health_list) == 1
if health_list[0].state != 'InService':
logger.debug(
"is_operational: Node %s not healthy in loadbalancer.",
self.boto_instance,
)
logger.debug("LB health state: %s", health_list[0].state)
return False
return True
elif self.aws_type == 'rds':
if self.boto_instance.status != 'available':
logger.debug(
"is_operational: Instance %s not available",
self.boto_instance,
)
return False
# TODO: add checks for pending values and matching params
return True
return False
def get_health_check_url(self):
if 'health_check' not in self._deployment_info:
return None
if not self.boto_instance.public_dns_name:
logger.debug(
"No health check url due to no public dns name",
)
return None
health_check = self._deployment_info['health_check']
status_url = health_check['status_url']
status_url = 'http://%s%s' % (
self.boto_instance.public_dns_name,
status_url,
)
return status_url
def passes_health_check(self):
"""
Does this node currently pass the `health_check` as defined in its
configuration.
If no `health_check` is defined, returns True.
"""
status_url = self.get_health_check_url()
if not status_url:
logger.info("No health check defined. Assuming healthy.")
return True
health_check = self._deployment_info['health_check']
status_success_string = health_check['status_contains']
timeout = health_check['status_check_timeout']
try:
site_status = requests.get(status_url, timeout=timeout)
except ConnectionError:
logger.info("health_check unavailable for %s", self)
logger.debug("status url: %s", status_url)
return False
except Timeout:
logger.info("health_check timed out for %s", self)
logger.debug("status url: %s", status_url)
return False
        except RequestException as e:
logger.info("health_check raised exception for %s", self)
logger.debug("status url: %s", status_url)
logger.debug("Exception: %s", e)
return False
if status_success_string not in site_status.text:
logger.debug(
"Required string not present in health_check for %s",
self,
)
logger.debug("status url: %s", status_url)
logger.debug("Required string: %s", status_success_string)
return False
return True
@property
def is_healthy(self):
"""
Is this instance healthy according to its status checks. Healthy nodes
        are ready to perform their function, regardless of whether or not
they're currently in operation (in the Loadbalancer, with the proper
IP, etc).
"""
if not self.boto_instance:
return False
if not self._deployment_info:
logger.critical(
"No deployment configuration found for node: %s",
self,
)
logger.critical(
"Unable to determine health status. "
"Assuming NOT healthy."
)
return False
if self.aws_type == 'ec2':
key_name = self._deployment_info['aws']['keypair']
if self.boto_instance.state != 'running':
logger.debug(
"is_healthy: Instance %s not running",
self.boto_instance,
)
return False
elif self.boto_instance.key_name != key_name:
logger.debug(
"is_healthy: Instance %s has wrong key",
self.boto_instance,
)
return False
return self.passes_health_check()
elif self.aws_type == 'rds':
if self.boto_instance.status != 'available':
logger.debug("Instance %s not available" % self.boto_instance)
return False
# TODO: Check to ensure no pending values and that params match
return True
return False
def make_operational(self, force_operational=False):
if not force_operational:
if not self.is_healthy or not self.is_active_generation:
                raise Exception(
                    "Only healthy nodes in the active generation "
"can be made operational"
)
if self.aws_type == 'ec2':
elastic_ip = self.get_elastic_ip()
loadbalancer = self.get_loadbalancer()
if elastic_ip and elastic_ip.instance_id:
if elastic_ip.instance_id != self.boto_instance.id:
logger.info(
"Dissociating elastic IP %s from instance %s",
elastic_ip,
elastic_ip.instance_id,
)
self.ec2conn.disassociate_address(elastic_ip.public_ip)
# Switch the elastic IP
if elastic_ip and elastic_ip.instance_id != self.boto_instance.id:
logger.info(
"Pointing IP %s to %s",
elastic_ip.public_ip,
self.boto_instance,
)
while elastic_ip.instance_id != self.boto_instance.id:
self.boto_instance.use_ip(elastic_ip)
elastic_ip = self.get_elastic_ip()
                    logger.info(
                        "Waiting 5s for ip %s to be associated to %s",
elastic_ip,
self.boto_instance,
)
time.sleep(5)
                logger.info(
                    "IP %s successfully associated to %s",
elastic_ip,
self.boto_instance,
)
# Stick the instance in the loadbalancer
if loadbalancer:
logger.info(
"Placing node <%s> in to loadbalancer <%s>",
self,
loadbalancer,
)
loadbalancer.register_instances([self.boto_instance.id])
elif self.aws_type == 'rds':
pass
def get_loadbalancer(self):
if not self.elbconn:
self.elbconn = elb.ELBConnection(
self.ec2conn.aws_access_key_id,
self.ec2conn.aws_secret_access_key)
if not self._deployment_info.get('loadbalancer', None):
return None
elb_list = self.elbconn.get_all_load_balancers(
load_balancer_names=[self._deployment_info['loadbalancer']])
assert len(elb_list) == 1
return elb_list[0]
def get_elastic_ip(self):
configured_ip = self._deployment_info['aws'].get('elastic_ip')
if not configured_ip:
return None
ips = self.ec2conn.get_all_addresses(
[configured_ip],
)
assert len(ips) == 1
return ips[0]
def set_initial_deploy_complete(self):
"""
Record that the initial deployment operation has completed
        successfully.
"""
self.initial_deploy_complete = 1
self.save()
def verify_running_state(self):
if self.is_running == 1 and not self.is_actually_running():
self.is_running = 0
self.save()
|
bsd-3-clause
| 5,361,584,043,886,388,000 | 33.519504 | 79 | 0.538805 | false | 4.465367 | true | false | false |
tjcsl/cslbot
|
cslbot/helpers/handler.py
|
1
|
28231
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import base64
import collections
import configparser
import copy
import logging
import random
import re
import threading
import time
from datetime import datetime, timedelta
from typing import Callable, Dict, List
import irc
from . import (acl, arguments, control, identity, misc, orm, registry, sql,
textutils, workers)
logger = logging.getLogger(__name__)
class BotHandler(object):
def __init__(self, config: configparser.ConfigParser, connection: irc.client.ServerConnection, channels: List[str], confdir: str, idx: int):
"""Set everything up.
| kick_enabled controls whether the bot will kick people or not.
| abuselist is a dict keeping track of how many times nicks have used
| rate-limited commands.
| modules is a dict containing the commands the bot supports.
| confdir is the path to the directory where the bot's config is stored.
| db - Is a db wrapper for data storage.
"""
self.connection = connection
self.channels = channels
self.config = config
self.idx = idx
self.db = sql.Sql(config, confdir)
# FIXME: don't pass in self
self.workers = workers.Workers(self)
self.guarded: List[str] = []
self.voiced: Dict[str, Dict[str, bool]] = collections.defaultdict(dict)
self.opers: Dict[str, Dict[str, bool]] = collections.defaultdict(dict)
self.features = {'account-notify': False, 'extended-join': False, 'whox': False}
start = datetime.now()
self.uptime = {'start': start, 'reloaded': start}
self.abuselist: Dict[str, Dict[str, datetime]] = {}
self.ping_map: Dict[str, str] = {}
self.outputfilter: Dict[str, List[Callable[[str], str]]] = collections.defaultdict(list)
self.kick_enabled = True
self.who_map: Dict[int, str] = {}
self.flood_lock = threading.Lock()
self.data_lock = threading.RLock()
self.last_msg_time = datetime.now()
self.confdir = confdir
self.log_to_ctrlchan = False
def get_data(self):
"""Saves the handler's data for :func:`.reloader.do_reload`"""
data = {}
data['guarded'] = self.guarded[:]
data['voiced'] = copy.deepcopy(self.voiced)
data['opers'] = copy.deepcopy(self.opers)
data['features'] = self.features.copy()
data['uptime'] = self.uptime.copy()
data['abuselist'] = self.abuselist.copy()
data['who_map'] = self.who_map.copy()
return data
def set_data(self, data):
"""Called from :func:`.reloader.do_reload` to restore the handler's data."""
for key, val in data.items():
setattr(self, key, val)
self.uptime['reloaded'] = datetime.now()
def update_authstatus(self, nick):
if self.features['whox']:
tag = random.randint(0, 999)
self.who_map[tag] = nick
self.send_who(nick, tag)
elif self.config['feature']['servicestype'] == "ircservices":
self.rate_limited_send('privmsg', 'NickServ', 'STATUS %s' % nick)
elif self.config['feature']['servicestype'] == "atheme":
self.rate_limited_send('privmsg', 'NickServ', 'ACC %s' % nick)
def send_who(self, target, tag):
# http://faerion.sourceforge.net/doc/irc/whox.var
# n(show nicknames), a(show nickserv status), f(show channel status/modes), t(show tag)
self.rate_limited_send('who', '{} %naft,{}'.format(target, tag))
    def is_admin(self, send, nick, required_role='admin'):
        """Checks if a nick is an admin.
        If NickServ hasn't responded yet, then the admin is unverified,
        so assume they aren't an admin.
        """
# If the required role is None, bypass checks.
if not required_role:
return True
# Current roles are admin and owner, which is a superset of admin.
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == nick).first()
if admin is None:
return False
# owner implies admin, but not the other way around.
if required_role == "owner" and admin.role != "owner":
return False
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return True
if not admin.registered:
self.update_authstatus(nick)
# We don't necessarily want to complain in all cases.
if send is not None:
send("Unverified admin: %s" % nick, target=self.config['core']['channel'])
return False
else:
if not self.features['account-notify']:
# reverify every 5min if we don't have the notification feature.
if datetime.now() - admin.time > timedelta(minutes=5):
self.update_authstatus(nick)
return True
def get_admins(self):
"""Check verification for all admins."""
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return
with self.db.session_scope() as session:
for a in session.query(orm.Permissions).all():
if not a.registered:
self.update_authstatus(a.nick)
def abusecheck(self, send, nick, target, limit, cmd):
""" Rate-limits commands.
| If a nick uses commands with the limit attr set, record the time
| at which they were used.
| If the command is used more than `limit` times in a
| minute, ignore the nick.
"""
if nick not in self.abuselist:
self.abuselist[nick] = {}
if cmd not in self.abuselist[nick]:
self.abuselist[nick][cmd] = [datetime.now()]
else:
self.abuselist[nick][cmd].append(datetime.now())
count = 0
for x in self.abuselist[nick][cmd]:
            # 60 seconds - arbitrary cutoff
if datetime.now() - x < timedelta(seconds=60):
count = count + 1
if count > limit:
msg = "%s: don't abuse scores!" if cmd == 'scores' else "%s: stop abusing the bot!"
send(msg % nick, target=target)
with self.db.session_scope() as session:
send(misc.ignore(session, nick))
return True
@staticmethod
def build_split_msg(msg, max_len):
msgs = []
msg_enc = [x.encode() for x in msg]
while sum(map(len, msg_enc)) > max_len:
split, msg_enc = misc.split_msg(msg_enc, max_len)
msgs.append(split)
msgs.append(''.join([x.decode() for x in msg_enc]).strip())
return msgs
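    # Illustrative behaviour (assumption -- misc.split_msg splits at the byte
    # limit, so the exact boundaries depend on its implementation):
    #   BotHandler.build_split_msg('abcdef', 4) -> roughly ['abcd', 'ef']
    # Splitting happens on encoded bytes so multi-byte UTF-8 characters are
    # never cut in half.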
def send(self, target, nick, msg, msgtype, ignore_length=False, filters=None):
"""Send a message.
Records the message in the log.
"""
if not isinstance(msg, str):
raise Exception("Trying to send a %s to irc, only strings allowed." % type(msg).__name__)
if filters is None:
filters = self.outputfilter[target]
for i in filters:
if target != self.config['core']['ctrlchan']:
msg = i(msg)
# Avoid spam from commands that produce excessive output.
if not ignore_length:
# Ignore everything after the first 800 chars.
msg = misc.truncate_msg(msg, 800)
# We can't send messages > 512 bytes to irc.
max_len = misc.get_max_length(target, msgtype)
msgs = self.build_split_msg(msg, max_len)
for i in msgs:
self.do_log(target, nick, i, msgtype)
if msgtype == 'action':
self.rate_limited_send('action', target, i)
else:
self.rate_limited_send('privmsg', target, i)
def rate_limited_send(self, mtype, target, msg=None):
with self.flood_lock:
elapsed = datetime.now() - self.last_msg_time
# Don't send messages more then once every 0.5 sec.
time.sleep(max(0, 0.5 - elapsed.total_seconds()))
if msg is None:
getattr(self.connection, mtype)(target)
else:
getattr(self.connection, mtype)(target, msg)
self.last_msg_time = datetime.now()
def do_log(self, target, nick, msg, msgtype):
"""Handles logging.
| Logs to a sql db.
"""
if not isinstance(msg, str):
raise Exception("IRC doesn't like it when you send it a %s" % type(msg).__name__)
target = target.lower()
flags = 0
# Properly handle /msg +#channel
if target.startswith(('+', '@')):
target = target[1:]
with self.data_lock:
if target in self.channels:
if self.opers[target].get(nick, False):
flags |= 1
if self.voiced[target].get(nick, False):
flags |= 2
else:
target = 'private'
# FIXME: should we special-case this?
# strip ctrl chars from !creffett
msg = msg.replace('\x02\x038,4', '<rage>')
self.db.log(nick, target, flags, msg, msgtype, self.connection.server)
if self.log_to_ctrlchan:
ctrlchan = self.config['core']['ctrlchan']
if target != ctrlchan:
ctrlmsg = "%s:%s:%s:%s" % (target, msgtype, nick, msg)
                # If we call self.send, we'll get an infinite loop.
self.connection.privmsg(ctrlchan, ctrlmsg.strip())
def do_part(self, cmdargs, nick, target, msgtype, send, c):
"""Leaves a channel.
Prevent user from leaving the primary channel.
"""
channel = self.config['core']['channel']
botnick = self.config['core']['nick']
if not cmdargs:
# don't leave the primary channel
if target == channel:
send("%s must have a home." % botnick)
return
else:
cmdargs = target
if not cmdargs.startswith(('#', '+', '@')):
cmdargs = '#' + cmdargs
# don't leave the primary channel
if cmdargs == channel:
send("%s must have a home." % botnick)
return
# don't leave the control channel
if cmdargs == self.config['core']['ctrlchan']:
send("%s must remain under control, or bad things will happen." % botnick)
return
self.send(cmdargs, nick, "Leaving at the request of %s" % nick, msgtype)
c.part(cmdargs)
def do_join(self, cmdargs, nick, msgtype, send, c):
"""Join a channel.
| Checks if bot is already joined to channel.
"""
if not cmdargs:
send("Join what?")
return
if cmdargs == '0':
send("I'm sorry, Dave. I'm afraid I can't do that.")
return
if not cmdargs.startswith(('#', '+', '@')):
cmdargs = '#' + cmdargs
cmd = cmdargs.split()
# FIXME: use argparse
if cmd[0] in self.channels and not (len(cmd) > 1 and cmd[1] == "force"):
send("%s is already a member of %s" % (self.config['core']['nick'], cmd[0]))
return
c.join(cmd[0])
self.send(cmd[0], nick, "Joined at the request of " + nick, msgtype)
def check_mode(self, mode):
if mode[2] != self.connection.real_nickname:
return False
if (mode[0], mode[1]) == ('-', 'o'):
return True
elif (mode[0], mode[1]) == ('+', 'b'):
return True
return False
def do_mode(self, target, msg, nick, send):
"""reop and handle guard violations."""
mode_changes = irc.modes.parse_channel_modes(msg)
with self.data_lock:
for change in mode_changes:
if change[1] == 'v':
self.voiced[target][change[2]] = True if change[0] == '+' else False
if change[1] == 'o':
self.opers[target][change[2]] = True if change[0] == '+' else False
# reop
# FIXME: handle -o+o msbobBot msbobBot
if [x for x in mode_changes if self.check_mode(x)]:
send("%s: :(" % nick, target=target)
# Assume bot admins know what they're doing.
if not self.is_admin(None, nick):
send("OP %s" % target, target='ChanServ')
send("UNBAN %s" % target, target='ChanServ')
if len(self.guarded) > 0:
# if user is guarded and quieted, devoiced, or deopped, fix that
regex = r"(.*(-v|-o|\+q|\+b)[^ ]*) (%s)" % "|".join(self.guarded)
match = re.search(regex, msg)
if match and nick not in [match.group(3), self.connection.real_nickname]:
modestring = "+voe-qb %s" % (" ".join([match.group(3)] * 5))
self.connection.mode(target, modestring)
send('Mode %s on %s by the guard system' % (modestring, target), target=self.config['core']['ctrlchan'])
def do_kick(self, send, target, nick, msg, slogan=True):
"""Kick users.
- If kick is disabled, don't do anything.
- If the bot is not a op, rage at a op.
- Kick the user.
"""
if not self.kick_enabled:
return
if target not in self.channels:
send("%s: you're lucky, private message kicking hasn't been implemented yet." % nick)
return
with self.data_lock:
ops = [k for k, v in self.opers[target].items() if v]
botnick = self.config['core']['nick']
if botnick not in ops:
ops = ['someone'] if not ops else ops
send(textutils.gen_creffett("%s: /op the bot" % random.choice(ops)), target=target)
elif random.random() < 0.01 and msg == "shutting caps lock off":
if nick in ops:
send("%s: HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U" % nick, target=target)
else:
self.connection.kick(target, nick, "HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U")
else:
msg = textutils.gen_slogan(msg).upper() if slogan else msg
if nick in ops:
send("%s: %s" % (nick, msg), target=target)
else:
self.connection.kick(target, nick, msg)
def do_args(self, modargs, send, nick, target, source, name, msgtype):
"""Handle the various args that modules need."""
realargs = {}
args = {
'nick': nick,
'handler': self,
'db': None,
'config': self.config,
'source': source,
'name': name,
'type': msgtype,
'botnick': self.connection.real_nickname,
'target': target if target[0] == "#" else "private",
'do_kick': lambda target, nick, msg: self.do_kick(send, target, nick, msg),
'is_admin': lambda nick: self.is_admin(send, nick),
'abuse': lambda nick, limit, cmd: self.abusecheck(send, nick, target, limit, cmd)
}
for arg in modargs:
if arg in args:
realargs[arg] = args[arg]
else:
raise Exception("Invalid Argument: %s" % arg)
return realargs
def do_welcome(self):
"""Do setup when connected to server.
- Join the primary channel.
- Join the control channel.
"""
self.rate_limited_send('join', self.config['core']['channel'])
self.rate_limited_send('join', self.config['core']['ctrlchan'], self.config['auth']['ctrlkey'])
# We use this to pick up info on admins who aren't currently in a channel.
self.workers.defer(5, False, self.get_admins)
extrachans = self.config['core']['extrachans']
if extrachans:
for chan in [x.strip() for x in extrachans.split(',')]:
self.rate_limited_send('join', chan)
def is_ignored(self, nick):
with self.db.session_scope() as session:
return session.query(orm.Ignore).filter(orm.Ignore.nick == nick).count()
def get_filtered_send(self, cmdargs, send, target):
"""Parse out any filters."""
parser = arguments.ArgParser(self.config)
parser.add_argument('--filter')
try:
filterargs, remainder = parser.parse_known_args(cmdargs)
except arguments.ArgumentException as ex:
return str(ex), None
cmdargs = ' '.join(remainder)
if filterargs.filter is None:
return cmdargs, send
filter_list, output = textutils.append_filters(filterargs.filter)
if filter_list is None:
return output, None
# define a new send to handle filter chaining
def filtersend(msg, mtype='privmsg', target=target, ignore_length=False):
self.send(target, self.connection.real_nickname, msg, mtype, ignore_length, filters=filter_list)
return cmdargs, filtersend
def do_rejoin(self, c, e):
# If we're still banned, this will trigger a bannedfromchan event so we'll try again.
if e.arguments[0] not in self.channels:
c.join(e.arguments[0])
def handle_event(self, msg, send, c, e):
if e.type == 'whospcrpl':
self.handle_who(e)
elif e.type == 'account':
self.handle_account(e)
elif e.type == 'authenticate':
self.handle_authenticate(e)
elif e.type == 'bannedfromchan':
self.workers.defer(5, False, self.do_rejoin, c, e)
elif e.type == 'cap':
self.handle_cap(e)
elif e.type in ['ctcpreply', 'nosuchnick']:
misc.ping(self.ping_map, c, e, datetime.now())
elif e.type == 'error':
logger.error(e.target)
elif e.type == 'featurelist':
if 'WHOX' in e.arguments:
self.features['whox'] = True
elif e.type == 'nick':
self.handle_nick(send, e)
elif e.type == 'nicknameinuse':
self.connection.nick('Guest%d' % random.getrandbits(20))
elif e.type == 'privnotice':
if e.source.nick == 'NickServ':
# FIXME: don't pass self
acl.set_admin(msg, self)
elif e.type == 'welcome':
self.handle_welcome()
@property
def serverpass(self):
return self.config['auth']['serverpass'].split(',')[self.idx].strip()
def handle_authenticate(self, e):
user = self.config['core']['nick']
if e.target == '+':
token = base64.b64encode('\0'.join([user, user, self.serverpass]).encode())
self.connection.send_raw('AUTHENTICATE %s' % token.decode())
self.connection.cap('END')
def handle_account(self, e):
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.source.nick).first()
if admin is not None:
if e.target == '*':
admin.registered = False
else:
admin.registered = True
admin.time = datetime.now()
def handle_welcome(self):
user = self.config['core']['nick']
logger.info("Connected to server %s", self.connection.server)
if self.config.getboolean('feature', 'nickserv') and self.connection.real_nickname != self.config['core']['nick']:
self.connection.privmsg('NickServ', 'REGAIN %s %s' % (user, self.serverpass))
self.do_welcome()
def handle_who(self, e):
# arguments: tag,nick,modes,account
# modes = H(here) or G(away), +(voice), @(oper)
        # account is the nickserv account if authed, else 0
# properly track voiced status.
location = self.who_map[int(e.arguments[0])]
# FIXME: devoice if G in modes
self.voiced[location][e.arguments[1]] = '+' in e.arguments[2]
self.opers[location][e.arguments[1]] = '@' in e.arguments[2]
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.arguments[1]).first()
if admin is not None:
if e.arguments[1] == e.arguments[3]:
admin.registered = True
admin.time = datetime.now()
def handle_cap(self, e):
if e.arguments[0] == 'ACK':
if e.arguments[1].strip() == 'sasl':
self.connection.send_raw('AUTHENTICATE PLAIN')
elif e.arguments[1].strip() == 'account-notify':
self.features['account-notify'] = True
elif e.arguments[1].strip() == 'extended-join':
self.features['extended-join'] = True
def handle_nick(self, send, e):
with self.data_lock:
for channel in misc.get_channels(self.channels, e.target):
self.do_log(channel, e.source.nick, e.target, 'nick')
# Move the voice+op status to the new nick
if e.source.nick in self.voiced[channel].keys(): # In case we somehow didn't set the voice state on the old nick
self.voiced[channel][e.target] = self.voiced[channel].pop(e.source.nick)
if e.source.nick in self.opers[channel].keys(): # As above, for ops
self.opers[channel][e.target] = self.opers[channel].pop(e.source.nick)
if identity.handle_nick(self, e):
for x in misc.get_channels(self.channels, e.target):
self.do_kick(send, x, e.target, "identity crisis")
def handle_join(self, c, e, target, send):
# Get status for all nicks in-channel when we join, or the new nick when somebody else joins.
if self.features['whox']:
tag = random.randint(0, 999)
self.who_map[tag] = target
if e.source.nick == c.real_nickname:
self.send_who(target, tag)
else:
self.send_who(e.source.nick, tag)
if e.source.nick == c.real_nickname:
send("Joined channel %s" % target, target=self.config['core']['ctrlchan'])
elif self.features['extended-join']:
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.source.nick).first()
if admin is not None:
if e.arguments[0] == e.source.nick:
admin.registered = True
admin.time = datetime.now()
else:
admin.registered = False
def get_cmd(self, msg):
cmd = msg.split()[0]
cmdchar = self.config['core']['cmdchar']
cmdlen = len(cmd) + 1
# FIXME: figure out a better way to handle !s
if cmd.startswith('%ss' % cmdchar):
# escape special regex chars
raw_cmdchar = '\\' + cmdchar if re.match(r'[\[\].^$*+?]', cmdchar) else cmdchar
match = re.match(r'%ss(\W)' % raw_cmdchar, cmd)
if match:
cmd = cmd.split(match.group(1))[0]
cmdlen = len(cmd)
cmdargs = msg[cmdlen:]
cmd_name = cmd[len(cmdchar):].lower() if cmd.startswith(cmdchar) else None
return cmd_name, cmdargs
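    # Examples (assumption -- cmdchar configured as '!'):
    #   get_cmd('!slap nick') -> ('slap', 'nick')
    #   get_cmd('!s/a/b/')    -> ('s', '/a/b/')
    # The sed-style regex above requires a non-word char after '!s', which is
    # why '!slap' is parsed as an ordinary command rather than a substitution.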
def run_cmd(self, send, nick, target, cmd_name, cmdargs, e):
cmdargs, filtersend = self.get_filtered_send(cmdargs, send, target)
if filtersend is None:
send(cmdargs)
return
cmd_obj = registry.command_registry.get_command(cmd_name)
if cmd_obj.is_limited() and self.abusecheck(send, nick, target, cmd_obj.limit, cmd_name):
return
if not self.is_admin(send, nick, cmd_obj.required_role):
            send("Insufficient privileges for command.")
return
args = self.do_args(cmd_obj.args, send, nick, target, e.source, cmd_name, e.type)
cmd_obj.run(filtersend, cmdargs, args, cmd_name, nick, target, self)
def handle_kick(self, c, e, target, send):
if e.arguments[0] == c.real_nickname:
send("Kicked from channel %s" % target, target=self.config['core']['ctrlchan'])
# Auto-rejoin after 5 seconds.
self.workers.defer(5, False, self.connection.join, target)
def handle_hooks(self, send, nick, target, e, msg):
if self.config['feature'].getboolean('hooks'):
for h in registry.hook_registry.get_hook_objects():
realargs = self.do_args(h.args, send, nick, target, e.source, h, e.type)
h.run(send, msg, e.type, self, target, realargs)
def handle_msg(self, c, e):
"""The Heart and Soul of IrcBot."""
if e.type not in ['authenticate', 'error', 'join', 'part', 'quit']:
nick = e.source.nick
else:
nick = e.source
if e.arguments is None:
msg = ""
else:
msg = " ".join(e.arguments).strip()
# Send the response to private messages to the sending nick.
target = nick if e.type == 'privmsg' else e.target
def send(msg, mtype='privmsg', target=target, ignore_length=False):
self.send(target, self.connection.real_nickname, msg, mtype, ignore_length)
if e.type in [
'account', 'authenticate', 'bannedfromchan', 'cap', 'ctcpreply', 'error', 'featurelist', 'nosuchnick', 'nick', 'nicknameinuse',
'privnotice', 'welcome', 'whospcrpl'
]:
self.handle_event(msg, send, c, e)
return
# ignore empty messages
if not msg and e.type != 'join':
return
self.do_log(target, nick, msg, e.type)
if e.type == 'mode':
self.do_mode(target, msg, nick, send)
return
if e.type == 'join':
self.handle_join(c, e, target, send)
return
if e.type == 'part':
if nick == c.real_nickname:
send("Parted channel %s" % target, target=self.config['core']['ctrlchan'])
return
if e.type == 'kick':
self.handle_kick(c, e, target, send)
return
if e.target == self.config['core']['ctrlchan'] and self.is_admin(None, nick):
control.handle_ctrlchan(self, msg, nick, send)
if self.is_ignored(nick) and not self.is_admin(None, nick):
return
self.handle_hooks(send, nick, target, e, msg)
# We only process hooks for notices, not commands.
if e.type == 'pubnotice':
return
msg = misc.get_cmdchar(self.config, c, msg, e.type)
cmd_name, cmdargs = self.get_cmd(msg)
if registry.command_registry.is_registered(cmd_name):
self.run_cmd(send, nick, target, cmd_name, cmdargs, e)
# special commands
elif cmd_name == 'reload':
with self.db.session_scope() as session:
if session.query(orm.Permissions).filter(orm.Permissions.nick == nick).count():
send("Aye Aye Capt'n")
|
gpl-2.0
| 8,572,865,770,078,463,000 | 40.516176 | 144 | 0.562715 | false | 3.842521 | true | false | false |
frank-und-freunde/Lunchez
|
functions.py
|
1
|
1594
|
import math
from datetime import datetime
def weekDay(year, month, day):
offset = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
week = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
afterFeb = 1
if month > 2:
afterFeb = 0
aux = year - 1700 - afterFeb
dayOfWeek = 5
dayOfWeek += (aux + afterFeb) * 365
    dayOfWeek += aux // 4 - aux // 100 + (aux + 100) // 400  # integer division; '/' yields floats in Python 3 and skews the result
dayOfWeek += offset[month - 1] + (day - 1)
dayOfWeek %= 7
return week[math.floor(dayOfWeek)]
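# Example: weekDay(2015, 6, 1) -> 'Monday'
# (1 June 2015 was a Monday; the integer division above keeps the index exact.)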
_now = datetime.now()
Today = weekDay(_now.year, _now.month, _now.day)
def restaurants(spots):
destination = ''
for x in range(0, len(spots)):
entry = ''
if 'dayoff' in spots[x] and spots[x]['dayoff'] == Today:
entry = ''
elif 'vacationFrom' in spots[x] and spots[x]['vacationFrom'] < str(datetime.now()) < spots[x]['vacationTo']:
entry = ''
else:
            if 'menu' in spots[x] and 'credit' in spots[x]: # if the lunch spot accepts card payment, show the credit-card emoji
entry = "<" + spots[x]['location'] + "|:" + spots[x]['number'] + ":> <" + spots[x]['menu'] + "|" + spots[x]['restaurant'] + "> :credit_card:\n"
elif 'menu' in spots[x]:
entry = "<" + spots[x]['location'] + "|:" + spots[x]['number'] + ":> <" + spots[x]['menu'] + "|" + spots[x]['restaurant'] + ">\n"
else:
entry = "<" + spots[x]['location'] + "|:" + spots[x]['number'] + ":> " + spots[x]['restaurant'] + "\n"
destination += entry
return destination
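# Illustrative input (assumption -- the field layout is inferred from the code
# above, not from real data):
#   spots = [{'restaurant': 'Luigi', 'number': 'one',
#             'location': 'https://example.com/map',
#             'menu': 'https://example.com/menu', 'credit': True}]
#   restaurants(spots)
#   # -> '<https://example.com/map|:one:> <https://example.com/menu|Luigi> :credit_card:\n'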
|
mit
| 5,577,577,409,626,868,000 | 40.947368 | 151 | 0.557089 | false | 2.872072 | false | false | false |
webrecorder/warcio
|
test/test_limitreader.py
|
1
|
1457
|
from warcio.limitreader import LimitReader
from contextlib import closing
from io import BytesIO
class TestLimitReader(object):
def test_limit_reader_1(self):
assert b'abcdefghji' == LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 10).read(26)
def test_limit_reader_2(self):
assert b'abcdefgh' == LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 8).readline(26)
def test_limit_reader_3(self):
reader = LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 8)
new_reader = LimitReader.wrap_stream(reader, 4)
assert reader == new_reader
assert b'abcd' == new_reader.readline(26)
#assert b'abcd' == LimitReader.wrap_stream(LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 8), 4).readline(26)
def test_limit_reader_multiple_read(self):
reader = LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 10)
string = None
for x in [2, 2, 20]:
string = reader.read(x)
assert b'efghji' == string
def test_limit_reader_zero(self):
assert b'' == LimitReader(BytesIO(b'a'), 0).readline(0)
def test_limit_reader_invalid_wrap(self):
b = BytesIO(b'some data')
assert LimitReader.wrap_stream(b, 'abc') == b
def test_limit_reader_close(self):
reader = LimitReader(BytesIO(b'abcdefg'), 3)
with closing(reader):
assert b'abc' == reader.read(10)
assert reader.tell() == 3
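# Quick reference, derived from the tests above:
#   LimitReader.wrap_stream(stream, 8)      # caps reads at 8 bytes
#   LimitReader.wrap_stream(stream, 'abc')  # non-numeric limit: stream returned unchanged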
|
apache-2.0
| 2,581,903,342,736,774,000 | 36.358974 | 122 | 0.654084 | false | 3.571078 | true | false | false |
saintdragon2/python-3-lecture-2015
|
homework_checker/civil_hw_personal_list/civil_hw_list.py
|
1
|
2177
|
from __future__ import print_function
import glob
import hw_sungyong_list
import os
# os.chdir("homework_01")
list_a = [10, 30, 40, -20, 15]
list_b = [-90, 20, 50, 2, 4]
list_c = ['hello', 34, 0, 12]
num_p = 3
num_q = 7
sy_a = hw_sungyong_list.square_of_list(list_a, num_p)
sy_b = hw_sungyong_list.square_of_list(list_b, num_q)
sy_x = hw_sungyong_list.gap(list_c)
print(sy_a)
print(sy_b)
print(sy_x)
f = open('result.txt', 'w')
for file in glob.glob("hw_civil_list_*.py"):
point = 0
name = file.replace('.py', '')
print(name)
mode = __import__(name)
a = mode.square_of_list(list_a, num_p)
b = mode.square_of_list(list_b, num_p)
c = mode.gap(list_c)
    message = ''
if a == sy_a:
point += 3
    if b == '문자열이 있습니다':  # "there is a string" -- kept verbatim; the checker compares exact output
point += 3
if c == sy_x:
point += 3
print(point)
f.write(name + '\t' + str( point ) + '\n')
f.close()
'''
dan_num = 4
five_num = 35
three_num = 369
fifteen_num = 15 * 7
sungyong_dan = sungyong_dan_gg.dan(dan_num)
five = sungyong_dan_gg.baesoo(five_num)
three = sungyong_dan_gg.baesoo(three_num)
fifteen = sungyong_dan_gg.baesoo(fifteen_num)
f = open('result.txt', 'w')
for file in glob.glob("hw_zest*.py"):
point = 0
name = file.replace('.py', '')
print(name)
mode = __import__(name)
a = mode.dan(dan_num)
message = ''
if type(a) is str and sungyong_dan in a:
point += 5
else:
message += 'dan failed!\t'
five_result = mode.baesoo(five_num)
if type(five_result) is str and '5의 배수입니다' in five_result:
point += 2
else:
message += '5 failed!\t'
three_result = mode.baesoo(three_num)
if type(three_result) is str and '3의 배수입니다' in three_result:
point += 2
else:
message += '3 failed!\t'
fifteen_result = mode.baesoo(fifteen_num)
if type(fifteen_result) is str and '3과 5의 공배수입니다' in mode.baesoo(fifteen_num):
point += 2
else:
message += '3 & 5 failed!\t'
f.write(name +'\t'+ str(point) + '\t' + message + '\n')
# from homework_01 import eval(name)
f.close()
'''
|
mit
| 4,742,109,069,305,678,000 | 19.403846 | 82 | 0.573314 | false | 2.46915 | false | false | false |
yostashiro/awo-custom
|
sale_line_quant_extended/wizard/stock_return_picking.py
|
1
|
1645
|
# -*- coding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2016 Rooms For (Hong Kong) Limited T/A OSCG
# <https://www.odoo-asia.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp import models, api
class StockReturnPicking(models.TransientModel):
_inherit = "stock.return.picking"
@api.model
def default_get(self, fields):
return_pick = super(StockReturnPicking, self).default_get(fields)
if 'product_return_moves' in return_pick:
return_moves = return_pick['product_return_moves']
for move in return_moves:
if self.env['product.product'].browse(move['product_id']).\
product_tmpl_id.categ_id.enforce_qty_1:
quant = self.env['stock.quant'].search(
[('history_ids', 'in', move['move_id'])])
if quant and quant.lot_id:
move['lot_id'] = quant.lot_id.id
return return_pick
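# Behaviour sketch (assumption, summarizing the override above): for each
# candidate return line whose product category enforces qty 1, the wizard
# pre-fills lot_id from the quant that recorded the original move, so the
# same serialized unit is the one returned.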
|
lgpl-3.0
| -1,805,168,410,514,851,300 | 43.459459 | 77 | 0.642553 | false | 3.926014 | false | false | false |
steelsoul/hide_to_tray
|
interface5.py
|
1
|
5039
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'interface.ui'
#
# Created by: PyQt5 UI code generator 5.14.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setWindowModality(QtCore.Qt.NonModal)
MainWindow.setEnabled(True)
MainWindow.resize(401, 251)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(401, 251))
MainWindow.setMaximumSize(QtCore.QSize(401, 251))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../../../.designer/backup/bomb.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap("bomb.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
MainWindow.setWindowIcon(icon)
MainWindow.setWindowOpacity(1.0)
self.centralwidget = QtWidgets.QWidget(MainWindow)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
self.centralwidget.setSizePolicy(sizePolicy)
self.centralwidget.setObjectName("centralwidget")
self.layoutWidget = QtWidgets.QWidget(self.centralwidget)
self.layoutWidget.setGeometry(QtCore.QRect(10, 10, 381, 191))
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(self.layoutWidget)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.lineEdit = QtWidgets.QLineEdit(self.layoutWidget)
self.lineEdit.setInputMask("")
self.lineEdit.setText("")
self.lineEdit.setMaxLength(3)
self.lineEdit.setCursorPosition(0)
self.lineEdit.setObjectName("lineEdit")
self.horizontalLayout.addWidget(self.lineEdit)
self.label_2 = QtWidgets.QLabel(self.layoutWidget)
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.verticalLayout.addLayout(self.horizontalLayout)
self.lcdNumber = QtWidgets.QLCDNumber(self.layoutWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lcdNumber.sizePolicy().hasHeightForWidth())
self.lcdNumber.setSizePolicy(sizePolicy)
self.lcdNumber.setProperty("value", 0.0)
self.lcdNumber.setObjectName("lcdNumber")
self.verticalLayout.addWidget(self.lcdNumber)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.pushButton = QtWidgets.QPushButton(self.layoutWidget)
self.pushButton.setObjectName("pushButton")
self.horizontalLayout_2.addWidget(self.pushButton)
self.pushButton_2 = QtWidgets.QPushButton(self.layoutWidget)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout_2.addWidget(self.pushButton_2)
self.verticalLayout.addLayout(self.horizontalLayout_2)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 401, 25))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setEnabled(False)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MiniTimer"))
self.label.setText(_translate("MainWindow", "Enter timeout:"))
self.label_2.setText(_translate("MainWindow", "minutes"))
self.pushButton.setText(_translate("MainWindow", "Start"))
self.pushButton_2.setText(_translate("MainWindow", "Reset"))
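# Minimal launcher sketch (assumption -- not part of the generated file, which
# normally only ships the Ui_MainWindow class):
# if __name__ == "__main__":
#     import sys
#     app = QtWidgets.QApplication(sys.argv)
#     window = QtWidgets.QMainWindow()
#     ui = Ui_MainWindow()
#     ui.setupUi(window)
#     window.show()
#     sys.exit(app.exec_())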
|
gpl-3.0
| 8,543,905,054,595,443,000 | 50.418367 | 112 | 0.71919 | false | 4.299488 | false | false | false |
alex3287/PyCharmProjects
|
project/parser_1.py
|
1
|
2603
|
# Parser for collecting external links from a website
from urllib.request import urlopen
from urllib.error import HTTPError
from bs4 import BeautifulSoup
url = 'http://gymn11.ru'
def get_html(url):
    '''Reads the page as HTML'''
try:
html = urlopen(url)
except:
        print('no such site')
return None
else:
return html.read()
def all_links(html):
    """Finds all links on the page
    and puts them into a list"""
suop = BeautifulSoup(html, "html.parser")
links = suop.body.find_all("a")
mas = []
for link in links:
if 'href' in link.attrs:
mas.append(link.attrs['href'])
return mas
def print_l(links):
    '''Prints each link on its own line'''
k=0
for i in links:
k+=1
print(k, i)
def type_links(mas):
    """Splits the list of links into 2 categories:
    1. external
    2. internal"""
    global url
input2 = []
output2 = []
for i in mas:
if ('http:' in i) or ('https:' in i):
if url not in i:
output2.append(i)
elif (len(i)>2) and ('java' not in i):
input2.append(i)
return output2, input2
def sort2(mas):
    """Drops duplicate links (normalizing a trailing slash first)"""
b=[]
for i in mas:
if i[-1]!='/':
k=i+'/'
b.append(k)
else: b.append(i)
links1 = set(b)
links=list(links1)
return links
def out_link(links):
    """Builds an absolute URL from an internal (relative) link"""
global url
out_li = []
for i in links:
link = url+i
out_li.append(link)
return out_li
def search_links(links):
links_list = []
n = 0
for i in links:
htm = get_html(i)
if htm:
n += 1
            print('done', n)
links5 = all_links(htm)
links6 = type_links(links5)
links7 = sort2(links6[0])
for k in links7:
# print(k)
links_list.append(k)
return sort2(links_list)
if __name__ == "__main__":
    # url = input("Enter the site to parse \n>>>")
html = get_html(url)
links = all_links(html)
links2 = type_links(links)
links3 = out_link(sort2(links2[1]))
print_l(links3)
print('*'*150)
    print_l(search_links(links3))
print('used')
|
gpl-3.0
| 583,739,816,110,222,500 | 22.2 | 55 | 0.539025 | false | 2.626274 | false | false | false |
TheLazyHase/dragon_dice_simulator
|
business/dice/face/save_with_special/counter.py
|
1
|
1773
|
# -*- coding: utf-8 *-*
# Copyright (c) 2013 Tisserant Pierre
#
# This file is part of Dragon dice simulator.
#
# Dragon dice simulator is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragon dice simulator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Dragon dice simulator. If not, see <http://www.gnu.org/licenses/>.
from business.dice.face import Face, SAI, Melee, Save
from business.effect import UnsaveableDamageEffect
class Counter(SAI, Melee, Save):
@property
def name(self):
return '%s Counter' % self.amount
def icon_by_type(self, icon_type):
value = 0
if (icon_type == Face.ICON_MELEE):
if (self.type_roll.is_melee):
value = self.amount
elif (icon_type == Face.ICON_SAVE):
if (self.type_roll.is_save):
value = self.amount
return value
@property
def special_effect(self):
value = None
#@TODO : restrict back damage to missile saving throw
if (self.type_roll.is_melee_save):
value = UnsaveableDamageEffect(self.amount)
return value
icon = {
Face.ICON_MELEE: 1,
Face.ICON_MISSILE: 0,
Face.ICON_MANEUVER: 0,
Face.ICON_MAGIC: 0,
Face.ICON_SAVE: 1,
}
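# Reading the face above: on a melee roll the Counter scores `amount` melee
# icons, on a save roll `amount` saves, and on a melee-save roll it also deals
# `amount` points of unsaveable damage back (see special_effect).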
|
gpl-3.0
| -6,605,072,641,197,626,000 | 33.764706 | 83 | 0.646926 | false | 3.640657 | false | false | false |
jimstorch/tokp
|
tokp_lib/system_rules.py
|
1
|
1980
|
#------------------------------------------------------------------------------
# File: system_rules.py
# Purpose:
# Author: James Mynderse
# Revised:
# License: GPLv3 see LICENSE.TXT
#------------------------------------------------------------------------------
import datetime
# defined by loot system rules:
SystemStartDate = datetime.datetime(2008,11,13,6,0)
RaidWeekStart = 2
PartFactor = {0.5:0.00, 1:0.10, 2:0.25, 3:0.50, 4:0.75}
PointsPerDay = {0.5:0.00, 1:0.82, 2:1.29, 3:1.68, 4:2.00}
PointDecay = {0:0.0, 1:0.0, 2:2.0, 3:4.0, 4:8.0, 5:10.0}
ValueLabels = {"epic":1, "rare":2, "uncommon":3, "zg":4, "special":5}
RevValueLabels = {1:"epic", 2:"rare", 3:"uncommon", 4:"zg", 5:"special"}
ValueCosts = {1:20 , 2:6, 3:3, 4:1, 5:0}
MinCost = 20
MaxCost = 50
ResetPercent = 0.75
MinPoints = -50
MaxPoints = 150
SkipRepeatParticipation = 1
def subtract_loot(OldScores, LootValueIndex):
# reset equal and less valuable scores
# subtract from more valuable scores
NewScores = {}
#print OldScores
#print LootValueIndex
for index in OldScores.keys():
#print index
if index >= LootValueIndex:
NewScores[index] = reset_score(OldScores[index])
else:
NewScores[index] = OldScores[index] - ValueCosts[LootValueIndex]
if NewScores[index] < MinPoints:
NewScores[index] = MinPoints
#print OldScores, LootValueIndex, NewScores
return NewScores
def reset_score(OldScore):
if 1:
# this is the old system, here for posterity
# reset cost
ResetCost = ResetPercent * OldScore
        # choose which cost to use
if ResetCost < MinCost:
NewScore = OldScore - MinCost
elif ResetCost > MaxCost:
NewScore = OldScore - MaxCost
else:
NewScore = OldScore - ResetCost
else:
NewScore = OldScore
return NewScore
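# Minimal usage sketch (hypothetical scores, not part of the original module):
# winning a rare (index 2) item resets the scores of tiers 2-5 and charges the
# rare cost (6 points) against the more valuable epic (index 1) score.
if __name__ == "__main__":
    scores = {1: 30.0, 2: 30.0, 3: 30.0, 4: 30.0, 5: 30.0}
    print subtract_loot(scores, ValueLabels["rare"])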
|
gpl-3.0
| 7,329,093,138,148,140,000 | 32.172414 | 79 | 0.554545 | false | 3.219512 | false | false | false |
WaveBlocks/WaveBlocksND
|
WaveBlocksND/IOM_plugin_lincombwp.py
|
1
|
13903
|
"""The WaveBlocks Project
IOM plugin providing functions for handling
linear combinations of general wavepackets.
@author: R. Bourquin
@copyright: Copyright (C) 2013, 2016 R. Bourquin
@license: Modified BSD License
"""
import numpy as np
def add_lincombwp(self, parameters, timeslots=None, lincombsize=None, blockid=0):
r"""Add storage for the linear combination of general wavepackets.
:param parameters: An :py:class:`ParameterProvider` instance with at
least the key ``ncomponents``.
:param timeslots: The number of time slots we need. Can be set to ``None``
to get automatically growing datasets.
:param lincombsize: The (maximal) size ``J`` of the linear combination of wavepackets. If specified
this remains fixed for all timeslots. Can be set to ``None`` (default)
to get automatically growing datasets.
:param blockid: The ID of the data block to operate on.
"""
N = parameters["ncomponents"]
# TODO: Handle multi-component packets
assert N == 1
if timeslots is None:
T = 0
Ts = None
else:
T = timeslots
Ts = timeslots
if lincombsize is None:
J = 0
Js = None
csJs = 32
else:
J = lincombsize
Js = lincombsize
csJs = min(32, Js)
# The overall group containing all lincombwp data
grp_lc = self._srf[self._prefixb + str(blockid)].require_group("lincombwp")
# Create the dataset with appropriate parameters
daset_tg_c = grp_lc.create_dataset("timegrid_coefficients", (T,), dtype=np.integer, chunks=True, maxshape=(Ts,), fillvalue=-1)
daset_tg_p = grp_lc.create_dataset("timegrid_packets", (T,), dtype=np.integer, chunks=True, maxshape=(Ts,), fillvalue=-1)
grp_lc.create_dataset("lincomb_size", (T,), dtype=np.integer, chunks=True, maxshape=(Ts,))
# Coefficients
grp_lc.create_dataset("coefficients", (T, J), dtype=np.complexfloating, chunks=(1, csJs), maxshape=(Ts, Js))
# Packet IDs (32 characters is the length of a 'md5' digest in hex representation)
daset_refs = grp_lc.create_dataset("packet_refs", (T, J), dtype=np.dtype((str, 32)), chunks=(1, csJs), maxshape=(Ts, Js))
gid = self.create_group(groupid="wavepacketsLCblock" + str(blockid))
daset_refs.attrs["packet_gid"] = gid
# Attach pointer to timegrid
daset_tg_c.attrs["pointer"] = 0
daset_tg_p.attrs["pointer"] = 0
def delete_lincombwp(self, blockid=0):
r"""Remove the stored linear combination.
:param blockid: The ID of the data block to operate on.
"""
try:
del self._srf[self._prefixb + str(blockid) + "/lincombwp"]
except KeyError:
pass
def has_lincombwp(self, blockid=0):
r"""Ask if the specified data block has the desired data tensor.
:param blockid: The ID of the data block to operate on.
"""
return "lincombwp" in self._srf[self._prefixb + str(blockid)].keys()
def save_lincombwp_description(self, descr, blockid=0):
r"""Save the description of this linear combination.
:param descr: The description.
:param blockid: The ID of the data block to operate on.
"""
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp"
# Save the description
for key, value in descr.items():
self._srf[pathd].attrs[key] = self._save_attr_value(value)
def save_lincombwp_coefficients(self, coefficients, timestep=None, blockid=0):
r"""Save the coefficients of the linear combination to a file.
:param coefficients: The coefficients of the linear combination of wavepackets.
:type coefficients: A single, suitable :py:class:`ndarray`.
:param timestep: The timestep at which we save the data.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/coefficients"
timeslot = self._srf[pathtg].attrs["pointer"]
# Write the data
self.must_resize(pathlcs, timeslot)
J = np.size(coefficients)
self._srf[pathlcs][timeslot] = J
self.must_resize(pathd, timeslot)
if not J == 0:
self.must_resize(pathd, J - 1, axis=1)
self._srf[pathd][timeslot, :J] = np.squeeze(coefficients)
# Write the timestep to which the stored values belong into the timegrid
self.must_resize(pathtg, timeslot)
self._srf[pathtg][timeslot] = timestep
# Update the pointer
self._srf[pathtg].attrs["pointer"] += 1
def save_lincombwp_wavepackets(self, packetlist, timestep=None, blockid=0):
r"""Save the wavepackets being part of this linear combination.
.. warning:: This is quite an expensive operation.
    :param packetlist: The list of wavepackets to save.
    :param timestep: The timestep at which we save the data.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/packet_refs"
gid = self._srf[pathd].attrs["packet_gid"]
timeslot = self._srf[pathtg].attrs["pointer"]
# Book keeping
self.must_resize(pathd, timeslot)
K = len(packetlist)
if not K == 0:
self.must_resize(pathd, K - 1, axis=1)
# Save the packets
known_packets = self.get_block_ids(groupid=gid)
for k, packet in enumerate(packetlist):
bid = "LC" + str(blockid) + "WP" + str(packet.get_id())
if bid not in known_packets:
bid = self.create_block(blockid=bid, groupid=gid)
descr = packet.get_description()
self.add_genericwp(descr, blockid=bid)
self.save_genericwp(packet, timestep=timestep, blockid=bid)
# Book keeping
self._srf[pathd][timeslot, k] = packet.get_id()
# Write the timestep to which the stored packets belong into the timegrid
self.must_resize(pathtg, timeslot)
self._srf[pathtg][timeslot] = timestep
# Update the pointer
self._srf[pathtg].attrs["pointer"] += 1
def load_lincombwp_description(self, blockid=0):
r"""Load the description of this linear combination.
:param blockid: The ID of the data block to operate on.
"""
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp"
# Load and return all descriptions available
descr = {}
for key, value in self._srf[pathd].attrs.items():
descr[key] = self._load_attr_value(value)
return descr
def load_lincombwp_timegrid(self, blockid=0, key=("coeffs", "packets")):
r"""Load the timegrid of this linear combination.
:param blockid: The ID of the data block to operate on.
:param key: Specify which linear combination timegrids to load. All are independent.
:type key: Tuple of valid identifier strings that are ``coeffs`` and ``packets``.
Default is ``("coeffs", "packets")``.
"""
tg = []
for item in key:
if item == "coeffs":
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
tg.append(self._srf[pathtg][:])
elif item == "packets":
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
tg.append(self._srf[pathtg][:])
if len(tg) == 1:
return tg[0]
else:
return tuple(tg)
def load_lincombwp_size(self, timestep=None, blockid=0):
r"""Load the size (number of packets) of this linear combination.
:param timestep: Load only the data of this timestep.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
if timestep is not None:
index = self.find_timestep_index(pathtg, timestep)
return self._srf[pathlcs][index]
else:
index = slice(None)
return self._srf[pathlcs][index]
def load_lincombwp_coefficients(self, timestep=None, blockid=0):
r"""Load the coefficients of this linear combination.
:param timestep: Load only the data of this timestep.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/coefficients"
if timestep is not None:
index = self.find_timestep_index(pathtg, timestep)
J = self._srf[pathlcs][index]
return self._srf[pathd][index, :J]
else:
index = slice(None)
return self._srf[pathd][index, :]
def load_lincombwp_wavepackets(self, timestep, packetindex=None, blockid=0):
r"""Load the wavepackets being part of this linear combination.
Note that this is quite an expensive operation.
:param timestep: Load only the data of this timestep.
:param packetindex: Load only the packet with this index. If ``None``
then load all packets for the given timestep.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/packet_refs"
index = self.find_timestep_index(pathtg, timestep)
J = self._srf[pathlcs][index]
refs = self._srf[pathd][index, :J]
if packetindex is None:
packets = []
for ref in refs:
bid = "LC" + str(blockid) + "WP" + str(ref)
packets.append(self.load_genericwp(timestep=timestep, blockid=bid))
return tuple(packets)
else:
if packetindex >= J:
raise ValueError("Packet index is invalid.")
bid = "LC" + str(blockid) + "WP" + str(refs[packetindex])
return self.load_genericwp(timestep=timestep, blockid=bid)
def load_lincombwp_wavepacket_refs(self, timestep=None, blockid=0):
r"""Load the references of the wavepackets being part of
this linear combination. References can be used as ``blockid``
for loading selected wavepackets manually. If for example a
``ref`` obtained through this method is:
>>> refs = anIom.load_lincombwp_wavepacket_refs(timestep=4)
>>> refs
array(['673290fd36a0fa80f28973ae31f10378',
'075dc9d7d2c558c97608e2fe08a7d53d',
'0aed8bf3e21b5894bf89ef894d3f7d0c'],
dtype='|S32')
    >>> ref = refs[0]
    >>> ref
    '673290fd36a0fa80f28973ae31f10378'
    then the corresponding block ID is:
    >>> bid = "LC" + str(blockid) + "WP" + ref
    >>> bid
    'LC0WP673290fd36a0fa80f28973ae31f10378'
with ``blockid`` the block ID where the linear combination
was stored. With that ``bid`` we can now for example load
data of a selected wavepacket:
>>> Pi = anIom.load_wavepacket_parameters(timestep=4, blockid=bid)
in case of a Hagedorn wavepacket.
:param timestep: Load only the data of this timestep.
:param blockid: The ID of the data block to operate on.
:return: A :py:class:`ndarray` of strings.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/packet_refs"
if timestep is not None:
index = self.find_timestep_index(pathtg, timestep)
else:
index = slice(None)
return self._srf[pathd][index, :]
#
# The following two methods are only for convenience and are NOT particularly efficient.
#
def load_lincombwp(self, timestep, blockid=0):
r"""Load a linear combination at a given timestep and return a fully configured
:py:class:`LinearCombinationOfWPs` instance. This method just calls some other
:py:class:`IOManager` methods in the correct order. It is included only for
convenience and is not particularly efficient.
:param timestep: The timestep :math:`n` we load the wavepacket.
:param blockid: The ID of the data block to operate on.
:return: A :py:class:`LinearCombinationOfWPs` instance.
"""
from WaveBlocksND.LinearCombinationOfWPs import LinearCombinationOfWPs
descr = self.load_lincombwp_description(blockid=blockid)
J = self.load_lincombwp_size(timestep=timestep, blockid=blockid)
if J == 0:
return None
# Load the data
c = self.load_lincombwp_coefficients(timestep=timestep, blockid=blockid)
psi = self.load_lincombwp_wavepackets(timestep=timestep, blockid=blockid)
# Assemble the linear combination
LC = LinearCombinationOfWPs(descr["dimension"], descr["ncomponents"])
LC.add_wavepackets(psi, c)
return LC
def save_lincombwp(self, lincomb, timestep, blockid=0):
r"""Save a linear combination of general wavepackets at a given timestep and read
all data to save from the :py:class:`LinearCombinationOfWPs` instance provided. This
method just calls some other :py:class:`IOManager` methods in the correct order.
It is included only for convenience and is not particularly efficient. We assume
the linear combination is already set up with the correct :py:meth:`add_lincombwp`
method call.
:param lincomb: The :py:class:`LinearCombinationOfWPs` instance we want to save.
:param timestep: The timestep :math:`n` at which we save the linear combination.
:param blockid: The ID of the data block to operate on.
"""
# Description
self.save_lincombwp_description(lincomb.get_description(), blockid=blockid)
# Wavepackets
self.save_lincombwp_wavepackets(lincomb.get_wavepackets(), timestep=timestep, blockid=blockid)
# Coefficients
self.save_lincombwp_coefficients(lincomb.get_coefficients(), timestep=timestep, blockid=blockid)
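# Typical call sequence (illustrative; assumes an IOManager instance `iom`
# with this plugin loaded, a ParameterProvider `params` and a
# LinearCombinationOfWPs instance `lincomb`):
#
#     iom.add_lincombwp(params, blockid=0)
#     iom.save_lincombwp(lincomb, timestep=0, blockid=0)
#     LC = iom.load_lincombwp(timestep=0, blockid=0)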
|
bsd-3-clause
| -8,754,087,416,531,690,000 | 36.474394 | 130 | 0.662519 | false | 3.471411 | false | false | false |
Chaffleson/blupy
|
blupy/settings.py
|
1
|
4657
|
"""
Django settings for blupy project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from confresolver import *
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = DJANGO_SECRET
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost', '127.0.0.1', DEV_SITE_URL]
# Celery Config
# http://celery.readthedocs.org/en/latest/django/first-steps-with-django.html
# Broker URL for CloudAMQP integration
BROKER_URL = BOUND_SERVICES['CloudAMQP']['credentials']['uri']
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_HIJACK_ROOT_LOGGER = False
CELERY_TIMEZONE = 'Europe/London'
CELERY_DISABLE_RATE_LIMITS = True
# CloudAMQP recommended settings
BROKER_POOL_LIMIT = 1 # Will decrease connection usage
BROKER_CONNECTION_TIMEOUT = 30 # May require a long timeout due to Linux DNS timeouts etc
BROKER_HEARTBEAT = 30 # Will detect stale connections faster
CELERY_SEND_EVENTS = False # Will not create celeryev.* queues
CELERY_EVENT_QUEUE_EXPIRES = 60 # Will delete all celeryev. queues without consumers after 1 minute.
# Using Finalware to auto create the super user for convenience
# http://stackoverflow.com/a/11210730/4717963
SITE_SUPERUSER_USERNAME = 'admin'
SITE_SUPERUSER_EMAIL = '[email protected]'
SITE_SUPERUSER_PASSWORD = SUPER_USER_PASSWORD # this is set in settings_local
SITE_SUPERUSER_ID = '48'
SITE_OBJECTS_INFO_DICT = {
'1': {
'name': 'development',
'domain': DEV_SITE_URL,
}
}
SITE_ID = 1
# Application definition
INSTALLED_APPS = (
'django.contrib.sites',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djcelery',
'example',
'finalware',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'blupy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'finalware.context_processors.contextify',
],
},
},
]
WSGI_APPLICATION = 'blupy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'example01',
'USER': BOUND_SERVICES['PostgreSQL']['credentials']['username'],
'PASSWORD': BOUND_SERVICES['PostgreSQL']['credentials']['password'],
'HOST': BOUND_SERVICES['PostgreSQL']['credentials']['public_hostname'].split(':')[0],
'PORT': BOUND_SERVICES['PostgreSQL']['credentials']['public_hostname'].split(':')[1]
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
|
gpl-2.0
| -8,794,081,472,193,870,000 | 29.84106 | 101 | 0.704316 | false | 3.568582 | false | false | false |
sburnett/seattle
|
production_nat_new/test/ut_newnatdeployed_legacywithnormalclient.py
|
1
|
1401
|
"""
One server connects to a deployed forwarder
One legacy client connects to the server
One new client connects to the server
A few messages are exchanged
"""
#pragma repy restrictions.normal
include NatForwardingShim.repy
include NATLayer_rpc.repy
def response(remote_ip,remote_port,sock,th,listenhandle):
try:
while True:
msg = sock.recv(1024)
sock.send('got'+msg)
except:
sock.close()
if callfunc == 'initialize':
serverkey = 'NAT$BLAHBLAHBLAH'
ip = '127.0.0.1'
port = 12345
# use the nat shim
server_shim = ShimStack('(NatForwardingShim)(NullShim)')
handle = server_shim.waitforconn(serverkey,12347,response)
sleep(10) # need to sleep while the value is advertised
# CLIENT LOGIC
# open connection using the legacy client
# manually enter the forwarder info
legacy_sock = nat_openconn(serverkey, 12347)
#client_shim = ShimStack('(NatForwardingShim)(NullShim)')
#sock = client_shim.openconn(serverkey,12347)
sock = nat_openconn(serverkey, 12347)
for i in range(10):
legacy_sock.send(str(i))
sock.send(str(i))
legacy_msg = legacy_sock.recv(10)
msg1 = sock.recv(10)
if msg1 != 'got'+str(i):
print 'GOT WRONG MSG FROM SHIM SOCK'
elif legacy_msg != 'got'+str(i):
print 'GOT WRONG MSG FROM LEGACY SOCK'
legacy_sock.close()
sock.close()
exitall()
|
mit
| 6,065,202,797,805,868,000 | 18.191781 | 60 | 0.670236 | false | 3.213303 | false | false | false |
ikumen/project-euler
|
solutions/013.py
|
1
|
1047
|
#!/usr/bin/env python
'''
013.py: https://projecteuler.net/problem=13
Large Sum
Work out the first ten digits of the sum of the following one-hundred 50-digit numbers.
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
...
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690
'''
import os
import pytest
import time
def first_ten_digits_of_sum(n, numbers):
'''Finds the first n digits of the sum of numbers list.'''
    return (str(sum(numbers)))[:n]
def test_first_ten_digits_of_sum():
'''Test'''
assert '5537376230' == first_ten_digits_of_sum(10, load_numbers())
def load_numbers():
with open(os.path.join(os.path.dirname(__file__), 'data/013.txt')) as input:
return list(map(int, input.readlines()))
def main():
'''Main runner, delegates to solution.'''
print(first_ten_digits_of_sum(10, load_numbers()))
if __name__ == '__main__':
start_time = time.time()
main()
print("--- %s seconds ---" % (time.time() - start_time))
|
mit
| -689,761,490,315,390,700 | 22.795455 | 87 | 0.723018 | false | 2.755263 | false | false | false |
UltronAI/Deep-Learning
|
Pattern-Recognition/hw2-Feature-Selection/skfeature/example/test_chi_square.py
|
1
|
1627
|
import scipy.io
from sklearn.metrics import accuracy_score
from sklearn import cross_validation
from sklearn import svm
from skfeature.function.statistical_based import chi_square
def main():
# load data
mat = scipy.io.loadmat('../data/BASEHOCK.mat')
X = mat['X'] # data
X = X.astype(float)
y = mat['Y'] # label
y = y[:, 0]
n_samples, n_features = X.shape # number of samples and number of features
# split data into 10 folds
ss = cross_validation.KFold(n_samples, n_folds=10, shuffle=True)
# perform evaluation on classification task
num_fea = 100 # number of selected features
clf = svm.LinearSVC() # linear SVM
correct = 0
for train, test in ss:
# obtain the chi-square score of each feature
score = chi_square.chi_square(X, y)
# rank features in descending order according to score
idx = chi_square.feature_ranking(score)
# obtain the dataset on the selected features
selected_features = X[:, idx[0:num_fea]]
# train a classification model with the selected features on the training dataset
clf.fit(selected_features[train], y[train])
# predict the class labels of test data
y_predict = clf.predict(selected_features[test])
# obtain the classification accuracy on the test data
acc = accuracy_score(y[test], y_predict)
correct = correct + acc
# output the average classification accuracy over all 10 folds
print 'Accuracy:', float(correct)/10
if __name__ == '__main__':
main()
|
mit
| 7,051,745,803,490,119,000 | 31.244898 | 89 | 0.635526 | false | 4.098237 | false | false | false |
ngsxfem/ngsxfem
|
demos/fictdom.py
|
1
|
4669
|
"""
In this example we solve a scalar *unfitted* PDE problem. As a
discretisation method we use a level set based geometry description and
a Cut (or Fictitious) Finite element method with a Nitsche formulation
to impose boundary conditions. For stability we add a ghost penalty
stabilization.
Domain:
-------
The domain is [-1,1]^2 while the interface is a ring described by a
level set function. In the discretisation the level set function is
approximated with a piecewise linear interpolation. This approximate
geometry is then mapped by applying a mesh deformation resulting in
a higher order geometry approximation.
PDE problem:
------------
- (u_xx + u_yy) = f in Omega (where levelset is negative)
u = 0 on dOmega (where levelset is zero)
The r.h.s. term f is chosen according to a manufactured solution.
Discretisation:
---------------
* Background finite element space restricted to active domain (CutFEM)
* Nitsche formulation to impose boundary conditions, see e.g. [1]
* Ghost penalty stabilization to deal with bad cuts (version as in [2])
Implementational aspects:
-------------------------
* Geometry approximation using isoparametric unfitted FEM
* A (sparse) direct solver is applied to solve the arising linear systems.
References:
-----------
All concepts that are used here are explained in the jupyter tutorials
`basics.ipynb`, `intlset.ipynb` and `cutfem.ipynb`.
Literature:
-----------
[1] E. Burman, P. Hansbo, Fictitious domain finite element methods using
cut elements: II. A stabilized Nitsche method, Appl. Num. Math.
62(4):328-341, 2012.
[2] J. Preuß, Higher order unfitted isoparametric space-time FEM on
moving domains. Master's thesis, NAM, University of Göttingen, 2018.
"""
# ------------------------------ LOAD LIBRARIES -------------------------------
from netgen.geom2d import SplineGeometry
from ngsolve import *
from ngsolve.internal import *
from xfem import *
from xfem.lsetcurv import *
ngsglobals.msg_level = 2
# -------------------------------- PARAMETERS ---------------------------------
# Quadrilateral (or simplicial mesh)
quad_mesh = False
# Mesh diameter
maxh = 0.1
# Finite element space order
order = 3
# Stabilization parameter for ghost-penalty
gamma_stab = 0.1
# Stabilization parameter for Nitsche
lambda_nitsche = 10 * order * order
# ----------------------------------- MAIN ------------------------------------
# Geometry and Mesh
square = SplineGeometry()
square.AddRectangle((-1, -1), (1, 1), bc=1)
ngmesh = square.GenerateMesh(maxh=maxh, quad_dominated=quad_mesh)
mesh = Mesh(ngmesh)
# Manufactured exact solution for monitoring the error
r2 = 3 / 4 # outer radius
r1 = 1 / 4 # inner radius
rc = (r1 + r2) / 2.0
rr = (r2 - r1) / 2.0
r = sqrt(x**2 + y**2)
levelset = IfPos(r - rc, r - rc - rr, rc - r - rr)
exact = (20 * (r2 - sqrt(x**2 + y**2)) * (sqrt(x**2 + y**2) - r1)).Compile()
coeff_f = - (exact.Diff(x).Diff(x) + exact.Diff(y).Diff(y)).Compile()
# Higher order level set approximation
lsetmeshadap = LevelSetMeshAdaptation(mesh, order=order, threshold=0.1,
discontinuous_qn=True)
deformation = lsetmeshadap.CalcDeformation(levelset)
lsetp1 = lsetmeshadap.lset_p1
# Element, facet and dof marking w.r.t. boundary approximation with lsetp1:
ci = CutInfo(mesh, lsetp1)
hasneg = ci.GetElementsOfType(HASNEG)
hasif = ci.GetElementsOfType(IF)
# facets used for stabilization:
ba_facets = GetFacetsWithNeighborTypes(mesh, a=hasneg, b=hasif)
Vhbase = H1(mesh, order=order, dirichlet=[], dgjumps=True)
Vh = Restrict(Vhbase, hasneg)
gfu = GridFunction(Vh)
u, v = Vh.TrialFunction(), Vh.TestFunction()
h = specialcf.mesh_size
n = Normalize(grad(lsetp1))
# integration domains:
dx = dCut(lsetp1, NEG, definedonelements=hasneg, deformation=deformation)
ds = dCut(lsetp1, IF, definedonelements=hasif, deformation=deformation)
dw = dFacetPatch(definedonelements=ba_facets, deformation=deformation)
a = BilinearForm(Vh, symmetric=False)
# Diffusion term
a += grad(u) * grad(v) * dx
# Nitsche term
a += -grad(u) * n * v * ds
a += -grad(v) * n * u * ds
a += (lambda_nitsche / h) * u * v * ds
# Ghost penalty stabilization (near the boundary)
a += gamma_stab / h**2 * (u - u.Other()) * (v - v.Other()) * dw
# R.h.s. term:
f = LinearForm(Vh)
f += coeff_f * v * dx
# Assemble system
a.Assemble()
f.Assemble()
# Solve linear system
gfu.vec.data = a.mat.Inverse(Vh.FreeDofs()) * f.vec
# Measure the error
l2error = sqrt(Integrate((gfu - exact)**2*dx, mesh))
print("L2 Error: {0}".format(l2error))
# visualization:
Draw(deformation, mesh, "deformation")
DrawDC(lsetp1, gfu, 0, mesh, "uh", deformation=deformation)
|
lgpl-3.0
| 5,536,156,285,712,531,000 | 30.533784 | 79 | 0.674095 | false | 2.976403 | false | false | false |
Esri/raster-functions
|
functions/PercentAboveThreshold.py
|
1
|
5988
|
import numpy as np
from datetime import timedelta
import datetime
#import sys
#import os
#import pickle
#debug_logs_directory =
class PercentAboveThreshold():
def __init__(self):
self.name = 'Percent Above or Below Threshold'
        self.description = 'Calculates the percentage of pixels that are above or below ' \
                           'a threshold value. The threshold value is set in the raster function. ' \
                           'The raster function can be applied to a time-enabled stack of rasters in ' \
                           'a mosaic dataset.'
self.times = []
self.start_year = None
self.end_year = None
self.threshold = 50
def getParameterInfo(self):
return [
{
'name': 'rasters',
'dataType': 'rasters',
'value': None,
'required': True,
'displayName': 'Rasters',
'description': 'The collection of rasters to analyze.',
},
{
'name': 'start_date',
'dataType': 'string',
'value': '1/1/2019 12:30:00',
'required': True,
'displayName': 'Start Date',
'description': 'The beginning date of analysis (inclusive of entire year).',
},
{
'name': 'end_date',
'dataType': 'string',
'value': '12/31/2019 23:30:00',
'required': True,
'displayName': 'End Date',
'description': 'The final date of analysis (inclusive of entire year).',
},
{
'name': 'threshold',
'dataType': 'numeric',
'value': 45,
'required': True,
'displayName': 'Value Threshold',
'description': 'Value Threshold.',
}
]
def getConfiguration(self, **scalars):
return {
'inheritProperties': 4 | 8, # inherit everything but the pixel type (1) and NoData (2)
'invalidateProperties': 2 | 4, # invalidate histogram and statistics because we are modifying pixel values
'inputMask': True, # need raster mask of all input rasters in .updatePixels().
'resampling': False, # process at native resolution
'keyMetadata': ['AcquisitionDate']
}
def updateRasterInfo(self, **kwargs):
# outStats = {'minimum': -1, 'maximum': 1}
# outStatsTuple = tuple(outStats for i in range(outBandCount))
kwargs['output_info']['pixelType'] = 'f4' # output pixels are floating-point values
kwargs['output_info']['histogram'] = () # no statistics/histogram for output raster specified
kwargs['output_info']['statistics'] = () # outStatsTuple
#kwargs['output_info'][
# 'bandCount'] = outBandCount # number of output bands. 7 time bands, 3 TC bands, creates 21 bands
self.times = kwargs['rasters_keyMetadata']
self.start_date = kwargs['start_date']
self.end_date = kwargs['end_date']
self.threshold = int(kwargs['threshold'])
return kwargs
def updateKeyMetadata(self, names, bandIndex, **keyMetadata):
return keyMetadata
def updatePixels(self, tlc, shape, props, **pixelBlocks):
#fname = '{:%Y_%b_%d_%H_%M_%S}_t.txt'.format(datetime.datetime.now())
#filename = os.path.join(debug_logs_directory, fname)
#file = open(filename,"w")
#file.write("File Open.\n")
pix_time = [j['acquisitiondate'] for j in self.times]
#pickle_filename = os.path.join(debug_logs_directory, fname)
#pickle.dump(pix_time, open(pickle_filename[:-4]+'pix_time.p',"wb"))
#file.write(str(len(pix_time))+ "\n")
pix_blocks = pixelBlocks['rasters_pixels']
pix_array = np.asarray(pix_blocks)
#pickle_filename = os.path.join(debug_logs_directory, fname)
#pickle.dump(pix_array, open(pickle_filename[:-4]+'pix_blocks.p',"wb"))
pix_array_dim = pix_array.shape
num_squares_x = pix_array_dim[2]
num_squares_y = pix_array_dim[3]
#file.write("Filtering Based on Time\n")
# This worked before I added time Filtering:
#pix_as_array = np.reshape(pix_array, -1)
#total_count = np.size(pix_as_array)
#vals_above_thresh_count = np.size(np.where(pix_as_array <= self.threshold))
#outBlock = np.ones((num_squares_x, num_squares_y)) * (vals_above_thresh_count / total_count) * 100
t_array = []
ind_array = []
start_date = self.start_date #"1/1/2019 12:30:00"
end_date = self.end_date #"7/7/2019 12:30:00"
start_datetime = datetime.datetime.strptime(start_date, '%m/%d/%Y %H:%M:%S') # %p')
end_datetime = datetime.datetime.strptime(end_date, '%m/%d/%Y %H:%M:%S') # %p')
for ind, time in enumerate(pix_time):
temp_t = datetime.datetime(1900, 1, 1) + timedelta(time - 2)
if temp_t >= start_datetime and temp_t <= end_datetime:
t_array.append(temp_t)
ind_array.append(ind)
#time_within = [pix_time[x] for x in ind_array]
pix_array_within = pix_array[ind_array, :, :, :]
#threshold = 50
pix_as_array = np.reshape(pix_array_within, -1)
total_count = np.size(pix_as_array)
vals_above_thresh_count = np.size(np.where(pix_as_array <= self.threshold)) #< below, > above
outBlock = np.ones((num_squares_x, num_squares_y)) * (vals_above_thresh_count / total_count) * 100
#file.write("DONE\n")
#file.close()
pixelBlocks['output_pixels'] = outBlock.astype(props['pixelType'], copy=False)
#masks = np.array(pixelBlocks['rasters_mask'], copy=False)
#pixelBlocks['output_mask'] = np.all(masks, axis=0).astype('u1', copy=False)
return pixelBlocks
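if __name__ == '__main__':
    # Illustrative smoke test with synthetic data (not part of the deployed
    # raster function; ArcGIS normally drives these calls). The serial date
    # 43600.5 falls in mid-2019, inside the default analysis window.
    fn = PercentAboveThreshold()
    fn.times = [{'acquisitiondate': 43600.5}]
    fn.start_date = '1/1/2019 12:30:00'
    fn.end_date = '12/31/2019 23:30:00'
    fn.threshold = 45
    blocks = {'rasters_pixels': np.random.randint(0, 100, (1, 1, 8, 8))}
    out = fn.updatePixels((0, 0), (8, 8), {'pixelType': 'f4'}, **blocks)
    print(out['output_pixels'][0, 0])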
|
apache-2.0
| 2,665,880,069,612,288,500 | 38.92 | 119 | 0.559619 | false | 3.768408 | false | false | false |
frmdstryr/enamlx
|
enamlx/qt/qt_tree_view.py
|
1
|
8321
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2015, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on Aug 28, 2015
"""
from atom.api import (
Typed, Instance, Property, Int
)
from enamlx.qt.qt_abstract_item_view import (
QtAbstractItemView, QAbstractAtomItemModel, IS_QT4
)
from enamlx.widgets.tree_view import (
ProxyTreeViewItem, ProxyTreeView, ProxyTreeViewColumn, AbstractWidgetItem
)
from enamlx.qt.qt_abstract_item import AbstractQtWidgetItem, RESIZE_MODES
from qtpy.QtWidgets import QTreeView
from qtpy.QtCore import QAbstractItemModel, QModelIndex
from enaml.core.pattern import Pattern
from enaml.qt.qt_widget import QtWidget
from enaml.application import timed_call
class QAtomTreeModel(QAbstractAtomItemModel, QAbstractItemModel):
def rowCount(self, parent):
d = self.declaration
if d.vertical_headers:
return len(d.vertical_headers)
elif parent.isValid():
item = parent.internalPointer()
d = item.declaration
return len(d.items) if d and not d.is_destroyed else 0
def columnCount(self, parent):
d = self.declaration
if d.horizontal_headers:
return len(d.horizontal_headers)
elif parent.isValid():
item = parent.internalPointer()
d = item.declaration
return len(d._columns) if d and not d.is_destroyed else 0
def index(self, row, column, parent):
""" The index should point to the corresponding QtControl in the
enaml object hierarchy.
"""
item = parent.internalPointer()
#: If the parent is None
d = self.declaration if item is None else item.declaration
if row < len(d._items):
proxy = d._items[row].proxy
assert isinstance(proxy, QtTreeViewItem), \
"Invalid item {}".format(proxy)
else:
proxy = d.proxy
return self.createIndex(row, column, proxy)
def parent(self, index):
if not index.isValid():
return QModelIndex()
item = index.internalPointer()
if not isinstance(item, QtTreeViewItem) or item.is_destroyed:
return QModelIndex()
parent = item.parent()
if not isinstance(parent, QtTreeViewItem) or parent.is_destroyed:
return QModelIndex()
d = parent.declaration
return self.createIndex(d.row, 0, parent)
def itemAt(self, index=None):
if not index or not index.isValid():
return
item = index.internalPointer()
assert isinstance(item, QtTreeViewItem), \
"Invalid index: {} at ({},{}) {}".format(
index, index.row(), index.column(), item)
d = item.declaration
try:
c = index.column() # - d.visible_column
return d._columns[c].proxy
except IndexError:
return
class QtTreeView(QtAbstractItemView, ProxyTreeView):
#: Tree widget
widget = Typed(QTreeView)
#: Root index
index = Instance(QModelIndex, ())
def create_widget(self):
self.widget = QTreeView(self.parent_widget())
def init_widget(self):
super(QtTreeView, self).init_widget()
d = self.declaration
self.set_show_root(d.show_root)
def init_model(self):
self.set_model(QAtomTreeModel(parent=self.widget))
# -------------------------------------------------------------------------
# Widget Setters
# -------------------------------------------------------------------------
def set_show_root(self, show):
self.widget.setRootIsDecorated(show)
def set_cell_padding(self, padding):
self.widget.setStyleSheet(
"QTreeView::item { padding: %ipx }" % padding)
def set_horizontal_minimum_section_size(self, size):
self.widget.header().setMinimumSectionSize(size)
def set_horizontal_stretch(self, stretch):
self.widget.header().setStretchLastSection(stretch)
def set_horizontal_headers(self, headers):
self.widget.header().model().layoutChanged.emit()
def set_resize_mode(self, mode):
if IS_QT4:
self.widget.header().setResizeMode(RESIZE_MODES[mode])
else:
self.widget.header().setSectionResizeMode(RESIZE_MODES[mode])
def set_show_horizontal_header(self, show):
header = self.widget.header()
header.show() if show else header.hide()
# -------------------------------------------------------------------------
# View refresh handlers
# -------------------------------------------------------------------------
def _refresh_visible_column(self, value):
self._pending_column_refreshes -= 1
if self._pending_column_refreshes == 0:
d = self.declaration
# TODO: What about parents???
try:
cols = self.model.columnCount(self.index)-d.visible_columns
d.visible_column = max(0, min(value, cols))
except RuntimeError:
#: Since refreshing is deferred several ms later
pass
def _refresh_visible_row(self, value):
self._pending_row_refreshes -= 1
if self._pending_row_refreshes == 0:
d = self.declaration
try:
rows = self.model.rowCount(self.index)-d.visible_rows
d.visible_row = max(0, min(value, rows))
except RuntimeError:
pass
class AbstractQtTreeViewItem(AbstractQtWidgetItem):
""" Base TreeViewItem class """
#: Pending refreshes when loading widgets
_refresh_count = Int(0)
#: Time to wait before loading widget
_loading_interval = Int(100)
def create_widget(self):
if self.declaration:
for child in self.children():
if isinstance(child, (Pattern, QtWidget)):
self.delegate = child
def set_row(self, row):
self._update_index()
def set_column(self, column):
self._update_index()
def _default_index(self):
d = self.declaration
return self.view.model.index(d.row, d.column, self.parent().index)
def _update_index(self):
self.index = self._default_index()
if self.delegate:
            self._refresh_count += 1
timed_call(self._loading_interval, self._update_delegate)
def _update_delegate(self):
""" Update the delegate cell widget. This is deferred so it
does not get called until the user is done scrolling.
"""
self._refresh_count -= 1
if self._refresh_count != 0:
return
try:
delegate = self.delegate
if not self._is_visible():
return
# The table destroys when it goes out of view
# so we always have to make a new one
delegate.create_widget()
delegate.init_widget()
# Set the index widget
self.view.widget.setIndexWidget(self.index, delegate.widget)
except RuntimeError:
# Since this is deferred, the table could be deleted already
# and a RuntimeError is possible
pass
def _is_visible(self):
return self.index.isValid()
def data_changed(self, change):
""" Notify the model that data has changed in this cell! """
self.view.model.dataChanged.emit(self.index, self.index)
class QtTreeViewItem(AbstractQtTreeViewItem, ProxyTreeViewItem):
def _default_view(self):
""" If this is the root item, return the parent
which must be a TreeView, otherwise return the
parent Item's view.
"""
parent = self.parent()
if isinstance(parent, QtTreeView):
return parent
return parent.view
class QtTreeViewColumn(AbstractQtTreeViewItem, ProxyTreeViewColumn):
def _default_view(self):
""" Since the TreeViewColumn must be a child of a TreeViewItem,
simply return the Item's view.
"""
return self.parent().view
def _default_index(self):
d = self.declaration
return self.view.model.index(d.row, d.column, self.parent().index)
|
mit
| -3,384,336,813,083,965,000 | 32.552419 | 79 | 0.592717 | false | 4.304708 | false | false | false |
soarlab/FPTaylor
|
benchmarks/toplas/print_results.py
|
1
|
4016
|
#!/usr/bin/env python
import sys
import os
import glob
import decimal
import argparse
parser = argparse.ArgumentParser(
description="Prints out results of FPTaylor experiments (from the log directory)")
parser.add_argument('--prec', type=int, default=2,
help="precision of printed results")
parser.add_argument('--alt-order', action='store_true',
help="alternative order of printed results")
parser.add_argument('--no-name', action='store_true',
help="do not print names of benchmarks")
parser.add_argument('--no-time', action='store_true',
help="do not print times of benchmarks")
parser.add_argument('--log-dir', default="log",
help="the log directory")
args = parser.parse_args()
if args.alt_order:
benchmark_list = [
"carbon_gas",
"doppler1",
"doppler2",
"doppler3",
"jet",
"predatorPrey",
"rigidBody1",
"rigidBody2",
"sine",
"sineOrder3",
"sqroot",
"t_div_t1",
"turbine1",
"turbine2",
"turbine3",
"verhulst",
"azimuth",
"logexp",
"sphere",
"kepler0",
"kepler1",
"kepler2",
"himmilbeau",
"hartman3",
"hartman6",
"floudas1",
"floudas2",
"floudas3"
]
else:
benchmark_list = [
"t_div_t1",
"sine",
"sqroot",
"sineOrder3",
"carbon_gas",
"verhulst",
"predatorPrey",
"rigidBody1",
"rigidBody2",
"doppler1",
"doppler2",
"doppler3",
"turbine1",
"turbine2",
"turbine3",
"jet",
"logexp",
"sphere",
"azimuth",
"kepler0",
"kepler1",
"kepler2",
"himmilbeau",
"hartman3",
"hartman6",
"floudas1",
"floudas2",
"floudas3"
]
class Problem:
def __init__(self, name, error, time):
self.name = name
self.error_str = "{0:.{prec}e}".format(
decimal.Context(prec=args.prec + 1, rounding=decimal.ROUND_UP).create_decimal(error),
prec=args.prec)
self.time_str = "{0:.1f}".format(time)
def __str__(self):
out = ""
if not args.no_name:
out += self.name + ", "
out += self.error_str
if not args.no_time:
out += ", " + self.time_str
return out
def problem_from_file(fname):
name = None
err_abs = None
err_rel = None
time = None
with open(fname, 'r') as f:
for line in f:
if line.startswith("Problem: "):
name = line[len("Problem: "):].strip()
elif line.startswith("Absolute error (exact): "):
err_abs = line[len("Absolute error (exact): "):].strip()
elif line.startswith("Absolute error (approximate): "):
err_abs = line[len("Absolute error (approximate): "):].strip()
elif line.startswith("Relative error (exact): "):
err_rel = line[len("Relative error (exact): "):].strip()
elif line.startswith("Relative error (approximate): "):
err_rel = line[len("Relative error (approximate): "):].strip()
elif line.startswith("Elapsed time: "):
time = float(line[len("Elapsed time: "):].strip())
if name and (err_abs or err_rel) and time:
return Problem(name, err_abs if err_abs else err_rel, time)
else:
return None
base_dir = args.log_dir
results = {}
for fname in glob.glob(os.path.join(base_dir, "*.log")):
result = problem_from_file(fname)
if result:
results[result.name] = result
for name in benchmark_list:
if name in results:
print(results[name])
del results[name]
if len(results) > 0:
print("\nUnsorted results:")
for _, result in results.iteritems():
print(result)
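# Example invocation (options and paths are illustrative):
#
#   ./print_results.py --log-dir log --prec 3 --alt-order --no-time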
|
mit
| -7,073,710,044,295,486,000 | 25.596026 | 97 | 0.525149 | false | 3.553982 | false | false | false |
dcramer/taskmaster
|
src/taskmaster/progressbar.py
|
1
|
1033
|
"""
taskmaster.progressbar
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from __future__ import absolute_import
from progressbar import ProgressBar, UnknownLength, Counter, Timer
from progressbar.widgets import Widget
class Speed(Widget):
'Widget for showing the rate.'
format = 'Rate: %6.2f/s'
def __init__(self):
self.startval = 0
def update(self, pbar):
'Updates the widget with the current SI prefixed speed.'
if self.startval == 0:
self.startval = pbar.currval
return 'Rate: --/s'
speed = (pbar.currval - self.startval) / pbar.seconds_elapsed
return self.format % speed
class Value(Widget):
def __init__(self, label=None, callback=None):
assert not (label and callback)
self.label = label
self.callback = callback
def update(self, pbar):
if self.callback:
return self.callback(pbar)
return self.label
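# Illustrative demo (not part of the original module): drive a bar with the
# custom Speed widget alongside the stock Counter and Timer widgets.
if __name__ == '__main__':
    import time

    pbar = ProgressBar(widgets=[Counter(), ' ', Speed(), ' ', Timer()],
                       maxval=100).start()
    for i in range(100):
        time.sleep(0.01)
        pbar.update(i + 1)
    pbar.finish()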
|
apache-2.0
| 8,523,526,974,073,646,000 | 21.955556 | 69 | 0.617619 | false | 3.783883 | false | false | false |
max291/RLScore
|
rlscore/measure/auc_measure.py
|
1
|
2659
|
import operator
import numpy as np
from rlscore.measure.measure_utilities import UndefinedPerformance
from measure_utilities import multitask
from rlscore.utilities import array_tools
def auc_singletask(Y, P):
    # the implementation has O(n*log(n)) time complexity
#P: predicted labels
#Y: true labels, y_i \in {-1,1} for each y_i \in Y
#
Y = np.array(Y).T[0]
P = np.array(P).T[0]
size = len(P)
#form a list of prediction-label pairs
I = np.argsort(P)
Y = Y[I]
P = P[I]
poscount = 0.
#The number of positive labels that have the same prediction
#as the current P[i] value
posties = 0.
#Number of pairwise mistakes this far
errors = 0.
j = 0
for i in range(size):
#j points always to the next entry in P for which
#P[j] > P[i]. In the end j will point outside of P
if j == i:
poscount += posties
posties = 0.
while j< size and P[i]==P[j]:
if Y[j]==1:
posties += 1
j+=1
if Y[i] == -1:
#every pairwise inversion of positive-negative pair
#incurs one error, except for ties where it incurs 0.5
#errors
errors += poscount+0.5*posties
poscount += posties
#the number of positive-negative pairs
paircount = poscount*(size-poscount)
#AUC is 1 - number of pairwise errors
if paircount == 0:
raise UndefinedPerformance("AUC undefined if both classes not present")
AUC = 1. - errors/paircount
return AUC
def auc_multitask(Y, P):
return multitask(Y, P, auc_singletask)
def auc(Y, P):
"""Area under the ROC curve (AUC).
A performance measure for binary classification problems.
Can be interpreted as an estimate of the probability, that
the classifier is able to discriminate between a randomly
drawn positive and negative training examples. An O(n*log(n))
time implementation, with correction for tied predictions.
If 2-dimensional arrays are supplied as arguments, then AUC
is separately computed for each column, after which the AUCs
are averaged.
Parameters
----------
Y: {array-like}, shape = [n_samples] or [n_samples, n_labels]
Correct labels, must belong to set {-1,1}
P: {array-like}, shape = [n_samples] or [n_samples, n_labels]
Predicted labels, can be any real numbers.
Returns
-------
auc: float
number between 0 and 1
"""
Y = array_tools.as_labelmatrix(Y)
P = array_tools.as_labelmatrix(P)
return np.mean(auc_multitask(Y,P))
auc.iserror = False
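if __name__ == '__main__':
    # Quick sanity check (illustrative data): positives score 0.9 and 0.4,
    # negatives 0.6 and 0.1, so one of the four pairs is inverted -> AUC 0.75.
    print(auc([1, 1, -1, -1], [0.9, 0.4, 0.6, 0.1]))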
|
mit
| -3,690,062,520,798,114,300 | 31.426829 | 79 | 0.618654 | false | 3.642466 | false | false | false |
fahadkaleem/DataStructures
|
LinkedList/Problems/nthNodeFromEndOfLinkedList/Solution2.py
|
1
|
2551
|
"""
Author: Mohammed Fahad Kaleem
Problem: Find the nth node from the end of Linked List
Method:
Maintain two pointers
1. Reference pointer and main pointer.
2. Initialize both reference and main pointers to head.
3. First move reference pointer to n nodes from head.
4. Move both pointers one by one until reference pointer reaches end.
5. Main pointer will point to nth node from the end. Return main pointer.
"""
class Node:
def __init__(self, data, next_node=None):
self.data = data
self.next_node = next_node
def get_data(self):
return self.data
def set_data(self, data):
self.data = data
def get_next_node(self):
return self.next_node
def set_next_node(self, next_node):
self.next_node = next_node
class LinkedList(object):
def __init__(self):
self.head = None
self.length = 0
def insert(self, data):
new_node = Node(data)
if self.length == 0:
self.head = new_node
self.length += 1
else:
current_node = self.head
while current_node.get_next_node() is not None:
current_node = current_node.get_next_node()
current_node.set_next_node(new_node)
self.length += 1
def print_linked_list(self):
if self.length == 0:
print("Linked List is empty")
else:
current_node = self.head
while current_node:
print("[%s]" % current_node.get_data(), end=" ==> ")
current_node = current_node.get_next_node()
print()
    def nth_node(self, n):
        if self.length == 0:
            print("Linked List is empty")
            return False
        if n <= 0 or n > self.length:
            print("Value of n is out of range for this Linked List")
            return False
        reference_pointer = self.head
        main_pointer = self.head
        # Move the reference pointer n nodes ahead of the main pointer
        for i in range(n):
            reference_pointer = reference_pointer.get_next_node()
        # Advance both pointers together until the reference pointer runs off
        # the end; the main pointer then rests on the nth node from the end
        while reference_pointer:
            reference_pointer = reference_pointer.get_next_node()
            main_pointer = main_pointer.get_next_node()
        print(main_pointer.get_data())
        return main_pointer.get_data()
if __name__ == "__main__":
linked_list = LinkedList()
linked_list.insert(12)
linked_list.insert(16)
linked_list.insert(3)
linked_list.insert(15)
linked_list.print_linked_list()
    linked_list.nth_node(2)
|
mit
| 3,686,608,701,181,479,400 | 28.321839 | 73 | 0.588397 | false | 3.801788 | false | false | false |
gratipay/gratipay.com
|
gratipay/elsewhere/__init__.py
|
1
|
12580
|
"""This subpackage contains functionality for working with accounts elsewhere.
"""
from __future__ import division, print_function, unicode_literals
from collections import OrderedDict
from datetime import datetime
import hashlib
import json
import logging
from urllib import quote
from urlparse import urlparse, urlunparse
import xml.etree.ElementTree as ET
from aspen import log, Response
from aspen.utils import to_age, utc
from oauthlib.oauth2 import TokenExpiredError
from requests_oauthlib import OAuth1Session, OAuth2Session
from gratipay.elsewhere._extractors import not_available
from gratipay.utils.i18n import LocalizedErrorResponse
ACTIONS = {'opt-in', 'connect'}
PLATFORMS = 'facebook google bitbucket bountysource github openstreetmap twitter venmo'.split()
class UnknownAccountElsewhere(Exception): pass
class PlatformRegistry(object):
"""Registry of platforms we support connecting to Gratipay accounts.
"""
def __init__(self, platforms):
self.__dict__ = OrderedDict((p.name, p) for p in platforms)
def __contains__(self, platform):
return platform.name in self.__dict__
def __iter__(self):
return iter(self.__dict__.values())
class UserInfo(object):
"""A simple container for a user's info.
Accessing a non-existing attribute returns `None`.
"""
def __init__(self, **kw):
self.__dict__.update(kw)
def __getattr__(self, key):
return self.__dict__.get(key, None)
def __setattr__(self, key, value):
if value is None:
self.__dict__.pop(key, None)
else:
self.__dict__[key] = value
class Platform(object):
allows_team_connect = False
# "x" stands for "extract"
x_user_info = not_available
x_user_id = not_available
x_user_name = not_available
x_display_name = not_available
x_email = not_available
x_gravatar_id = not_available
x_avatar_url = not_available
x_is_team = not_available
required_attrs = ( 'account_url'
, 'display_name'
, 'name'
)
def __init__(self, api_key, api_secret, callback_url, api_url=None, auth_url=None):
self.api_key = api_key
self.api_secret = api_secret
self.callback_url = callback_url
if api_url:
self.api_url = api_url
if auth_url:
self.auth_url = auth_url
elif not getattr(self, 'auth_url', None):
self.auth_url = self.api_url
# Determine the appropriate response parser using `self.api_format`
api_format = getattr(self, 'api_format', None)
if api_format == 'json':
self.api_parser = lambda r: r.json()
elif api_format == 'xml':
self.api_parser = lambda r: ET.fromstring(r.content)
elif api_format:
raise ValueError('unknown API format: '+str(api_format))
# Make sure the subclass was implemented properly.
missing_attrs = [a for a in self.required_attrs if not hasattr(self, a)]
if missing_attrs:
msg = "The class %s is missing these required attributes: %s"
msg %= self.__class__.__name__, ', '.join(missing_attrs)
raise AttributeError(msg)
def api_get(self, path, sess=None, **kw):
"""
Given a `path` (e.g. /users/foo), this function sends a GET request to
the platform's API (e.g. https://api.github.com/users/foo).
The response is returned, after checking its status code and ratelimit
headers.
"""
is_user_session = bool(sess)
if not sess:
sess = self.get_auth_session()
response = sess.get(self.api_url+path, **kw)
limit, remaining, reset = self.get_ratelimit_headers(response)
if not is_user_session:
self.log_ratelimit_headers(limit, remaining, reset)
# Check response status
status = response.status_code
if status == 401 and isinstance(self, PlatformOAuth1):
# https://tools.ietf.org/html/rfc5849#section-3.2
if is_user_session:
raise TokenExpiredError
raise Response(500)
if status == 404:
raise Response(404, response.text)
if status == 429 and is_user_session:
def msg(_, to_age):
if remaining == 0 and reset:
return _("You've consumed your quota of requests, you can try again in {0}.", to_age(reset))
else:
return _("You're making requests too fast, please try again later.")
raise LocalizedErrorResponse(status, msg)
if status != 200:
log('{} api responded with {}:\n{}'.format(self.name, status, response.text)
, level=logging.ERROR)
msg = lambda _: _("{0} returned an error, please try again later.",
self.display_name)
raise LocalizedErrorResponse(502, msg)
return response
def get_ratelimit_headers(self, response):
limit, remaining, reset = None, None, None
prefix = getattr(self, 'ratelimit_headers_prefix', None)
if prefix:
limit = response.headers.get(prefix+'limit')
remaining = response.headers.get(prefix+'remaining')
reset = response.headers.get(prefix+'reset')
try:
limit, remaining, reset = int(limit), int(remaining), int(reset)
reset = datetime.fromtimestamp(reset, tz=utc)
except (TypeError, ValueError):
d = dict(limit=limit, remaining=remaining, reset=reset)
log('Got weird rate headers from %s: %s' % (self.name, d))
limit, remaining, reset = None, None, None
return limit, remaining, reset
def log_ratelimit_headers(self, limit, remaining, reset):
"""Emit log messages if we're running out of ratelimit.
"""
if None in (limit, remaining, reset):
return
percent_remaining = remaining/limit
if percent_remaining < 0.5:
log_msg = (
'{0} API: {1:.1%} of ratelimit has been consumed, '
'{2} requests remaining, resets {3}.'
).format(self.name, 1 - percent_remaining, remaining, to_age(reset))
log_lvl = logging.WARNING
if percent_remaining < 0.2:
log_lvl = logging.ERROR
elif percent_remaining < 0.05:
log_lvl = logging.CRITICAL
log(log_msg, log_lvl)
def extract_user_info(self, info):
"""
Given a user_info object of variable type (depending on the platform),
extract the relevant information by calling the platform's extractors
(`x_user_name`, `x_user_id`, etc).
Returns a `UserInfo`. The `user_id` attribute is guaranteed to have a
unique non-empty value.
"""
r = UserInfo(platform=self.name)
info = self.x_user_info(r, info, info)
r.user_name = self.x_user_name(r, info, None)
if self.x_user_id.__func__ is not_available:
r.user_id = r.user_name
else:
r.user_id = self.x_user_id(r, info)
assert r.user_id is not None
r.user_id = unicode(r.user_id)
assert len(r.user_id) > 0
r.display_name = self.x_display_name(r, info, None)
r.email = self.x_email(r, info, None)
r.avatar_url = self.x_avatar_url(r, info, None)
if not r.avatar_url:
gravatar_id = self.x_gravatar_id(r, info, None)
if r.email and not gravatar_id:
gravatar_id = hashlib.md5(r.email.strip().lower()).hexdigest()
if gravatar_id:
r.avatar_url = 'https://secure.gravatar.com/avatar/'+gravatar_id
r.is_team = self.x_is_team(r, info, False)
r.extra_info = info
return r
def get_team_members(self, account, page_url=None):
"""Given an AccountElsewhere, return its membership list from the API.
"""
if not page_url:
page_url = self.api_team_members_path.format(
user_id=quote(account.user_id),
user_name=quote(account.user_name or ''),
)
r = self.api_get(page_url)
members, count, pages_urls = self.api_paginator(r, self.api_parser(r))
members = [self.extract_user_info(m) for m in members]
return members, count, pages_urls
def get_user_info(self, key, value, sess=None):
"""Given a user_name or user_id, get the user's info from the API.
"""
if key == 'user_id':
path = 'api_user_info_path'
else:
assert key == 'user_name'
path = 'api_user_name_info_path'
path = getattr(self, path, None)
if not path:
raise Response(400)
path = self._format_path(path, {key: value})
info = self.api_parser(self.api_get(path, sess=sess))
return self.extract_user_info(info)
def _format_path(self, path, values):
parsed = urlparse(path)
quoted_values = {k: quote(v) for k, v in values.items()}
parsed = parsed._replace(path=parsed.path.format(**values))
parsed = parsed._replace(query=parsed.query.format(**quoted_values))
return urlunparse(parsed)
def get_user_self_info(self, sess):
"""Get the authenticated user's info from the API.
"""
r = self.api_get(self.api_user_self_info_path, sess=sess)
info = self.extract_user_info(self.api_parser(r))
token = getattr(sess, 'token', None)
if token:
info.token = json.dumps(token)
return info
def get_friends_for(self, account, page_url=None, sess=None):
if not page_url:
page_url = self.api_friends_path.format(
user_id=quote(account.user_id),
user_name=quote(account.user_name or ''),
)
r = self.api_get(page_url, sess=sess)
friends, count, pages_urls = self.api_paginator(r, self.api_parser(r))
friends = [self.extract_user_info(f) for f in friends]
if count == -1 and hasattr(self, 'x_friends_count'):
count = self.x_friends_count(None, account.extra_info, -1)
return friends, count, pages_urls
class PlatformOAuth1(Platform):
request_token_path = '/oauth/request_token'
authorize_path = '/oauth/authorize'
access_token_path = '/oauth/access_token'
def get_auth_session(self, token=None):
args = ()
if token:
args = (token['token'], token['token_secret'])
return OAuth1Session(self.api_key, self.api_secret, *args,
callback_uri=self.callback_url)
def get_auth_url(self, **kw):
sess = self.get_auth_session()
r = sess.fetch_request_token(self.auth_url+self.request_token_path)
url = sess.authorization_url(self.auth_url+self.authorize_path)
return url, r['oauth_token'], r['oauth_token_secret']
def get_query_id(self, querystring):
return querystring['oauth_token']
def handle_auth_callback(self, url, token, token_secret):
sess = self.get_auth_session(dict(token=token, token_secret=token_secret))
sess.parse_authorization_response(url)
r = sess.fetch_access_token(self.auth_url+self.access_token_path)
sess.token = dict(token=r['oauth_token'],
token_secret=r['oauth_token_secret'])
return sess
class PlatformOAuth2(Platform):
oauth_default_scope = None
oauth_email_scope = None
oauth_payment_scope = None
def get_auth_session(self, state=None, token=None, token_updater=None):
return OAuth2Session(self.api_key, state=state, token=token,
token_updater=token_updater,
redirect_uri=self.callback_url,
scope=self.oauth_default_scope)
def get_auth_url(self, **kw):
sess = self.get_auth_session()
url, state = sess.authorization_url(self.auth_url)
return url, state, ''
def get_query_id(self, querystring):
return querystring['state']
def handle_auth_callback(self, url, state, unused_arg):
sess = self.get_auth_session(state=state)
sess.fetch_token(self.access_token_url,
client_secret=self.api_secret,
authorization_response=url)
return sess
|
mit
| 8,522,709,110,932,166,000 | 36.440476 | 112 | 0.592289 | false | 3.834197 | false | false | false |
jamesmcm/cryptopals
|
basic.py
|
1
|
9677
|
from math import sqrt
from numpy import mean
from Crypto.Cipher import AES
import operator
hexd={"0":0,"1":1,"2":2,"3":3,"4":4,"5":5,"6":6,"7":7,"8":8,"9":9,"a":10,"b":11,"c":12,"d":13,"e":14,"f":15}
b64d={0:"A",16:"Q",32:"g",48:"w",1:"B",17:"R",33:"h",49:"x",2:"C",18:"S",34:"i",50:"y",3:"D",19:"T",35:"j",51:"z",4:"E",20:"U",36:"k",52:"0",5:"F",21:"V",37:"l",53:"1",6:"G",22:"W",38:"m",54:"2",7:"H",23:"X",39:"n",55:"3",8:"I",24:"Y",40:"o",56:"4",9:"J",25:"Z",41:"p",57:"5",10:"K",26:"a",42:"q",58:"6",11:"L",27:"b",43:"r",59:"7",12:"M",28:"c",44:"s",60:"8",13:"N",29:"d",45:"t",61:"9",14:"O",30:"e",46:"u",62:"+",15:"P",31:"f",47:"v",63:"/"}
nhexd = dict (zip(hexd.values(),hexd.keys()))
nb64d= dict (zip(b64d.values(),b64d.keys()))
lf={"a":0.08167,"b":0.01492,"c":0.02782,"d":0.04253,"e":0.12702,"f":0.02228,"g":0.02015,"h":0.06094,"i":0.06966,"j":0.00153,"k":0.00772,"l":0.04025,"m":0.02406,"n":0.06749,"o":0.07507,"p":0.01929,"q":0.00095,"r":0.05987,"s":0.06327,"t":0.09056,"u":0.02758,"v":0.00978,"w":0.02360,"x":0.00150,"y":0.01974,"z":0.00074}
def encode(js):
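# Pack three 8-bit bytes into a 24-bit word, then slice it into four 6-bit
# base64 indices; the decimal masks below are 0xFC0000, 0x3F000, 0xFC0 and 0x3F.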
z = (js[0] << 8) | js[1]
z = (z<<8) | js[2]
js=[]
oc1=16515072&z
oc1=oc1>>18
oc2=258048&z
oc2=oc2>>12
oc3=4032&z
oc3=oc3>>6
oc4=63&z
return [oc1,oc2,oc3,oc4]
def decodehex(s):
out=[]
for i in xrange(len(s)/2):
c=s[2*i:(2*i)+2]
j=16*hexd[c[0]]+hexd[c[1]]
out.append(j)
return out
def hex2b64(s):
out=""
tc=0
js=[]
for i in xrange(len(s)/2):
c=s[2*i:(2*i)+2]
j=16*hexd[c[0]]+hexd[c[1]]
js.append(j)
tc+=1
if tc==3:
ocs=encode(js)
js=[]
tc=0
#print ocs
for oc in ocs:
out=out+str(b64d[oc])
if tc!=0:
for v in range(3-tc):
js.append(0)
ocs = encode(js)
for oc in ocs:
out=out+str(b64d[oc])
mys=""
for i in range(3-tc):
mys=mys+"="
out=out[:-(3-tc)]+mys
return out
def encodehex(n):
out=""
trigger=False
for i in range(64):
if n/(16**(63-i))>=1 or trigger==True:
trigger=True
#print i, n
if i!=63:
out+=str(nhexd[n/(16**(63-i))])
else:
out+=str(nhexd[n])
n=n-((n/(16**(63-i)))*(16**(63-i)))
if n<0:
n=0
#print out
return out
def createbinary(sl):
out=0
for i in range(len(sl)):
out=out<<8 | sl[i]
return out
def hexstring2ascii(s):
out=""
for i in xrange(len(s)/2):
c=s[2*i:(2*i)+2]
j=16*hexd[c[0]]+hexd[c[1]]
out+=str(chr(j))
return out
def ascii2hex(c):
o=encodehex(c)
if len(o)==1:
o="0"+o
return o
def repeatkeyxor(key,s, tohex=True):
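# Repeating-key (Vigenere-style) XOR: byte i of the input is XORed with
# byte i mod len(key) of the key; output as hex pairs or as raw characters.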
sl=list(s)
out=[]
for i in xrange(len(sl)):
out.append(ord(sl[i])^ord(key[i%len(key)]))
if tohex==True:
return "".join(map(ascii2hex,out))
else:
return "".join(map(chr,out))
def xorstrings(s1,s2):
out=[]
for i in xrange(len(s1)):
out.append(chr(ord(s1[i])^ord(s2[i])))
return "".join(out)
def b642ascii(s):
out=[]
for i in xrange(len(s)/4):
c=s[4*i:(4*i)+4]
#print c
n=0
nulls=0
for z in c:
if z!="=":
n=n<<6 | nb64d[z]
else:
nulls+=1
n=n<<6 | 0
c1=(n&16711680)>>16
c2=(n&65280)>>8
c3=n&255
cs=[c1,c2,c3]
for i in range(3-nulls):
out.append(chr(cs[i]))
return "".join(out)
def hamming(s1,s2):
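# Bitwise Hamming distance: popcount of the XOR of the two strings.
# Sanity check from the challenge text: hamming("this is a test", "wokka wokka!!!") == 37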
b1=str2bin(s1)
b2=str2bin(s2)
b=b1^b2
return ones(b)
def computehistogram(block):
myhist={}
chars=0
for k in lf:
myhist[k]=0
for c in block:
c=c.lower()
if c in myhist:
chars+=1
myhist[c]+=1
for k in myhist:
myhist[k]=myhist[k]/float(chars)
return(myhist)
def ascii2hexstring(msg):
return ''.join(x.encode('hex') for x in msg)
def comparehist(hist):
rmse=0
for k in hist:
rmse+=(lf[k]-hist[k])**2
return rmse
def str2bin(s):
o=0
for c in s:
o=o << 8 | ord(c)
return o
def ones(n):
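# Population count via Kernighan's trick: n &= n - 1 clears the lowest set bit.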
w = 0
while (n):
w += 1
n &= n - 1
return w
def decryptxor(k,s):
return repeatkeyxor(k,s,tohex=False)
def decryptECBAES(k,s):
cipher = AES.new(k, AES.MODE_ECB, "ignoreIV")
msg = cipher.decrypt(s)
return msg
def encryptECBAES(k,s):
cipher = AES.new(k, AES.MODE_ECB, "ignoreIV")
msg = cipher.encrypt(s)
return msg
def splitblocks(s,keysize):
blocks=[]
for i in xrange((len(s)/keysize)+1):
if i!=len(s)/keysize:
blocks.append(s[i*keysize:(i+1)*keysize])
else:
if len(s[i*keysize:])>0:
blocks.append(s[i*keysize:])
return blocks
if __name__=="__main__":
#Q1
print "Q1"
inputs="49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d"
print hex2b64(inputs)
#Q2
print "Q2"
s1=decodehex("1c0111001f010100061a024b53535009181c")
s2=decodehex("686974207468652062756c6c277320657965")
print encodehex(createbinary(s1)^createbinary(s2))
#Q3
print "Q3"
s=decodehex("1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736")
for i in range(20,120):
cur=map(chr,map(lambda x: x^i, s))
if all(map(lambda x: x>=32 and x<=126, map(ord, cur))):
if cur.count("a")/float(len(cur))>0.03 and cur.count("e")/float(len(cur))>0.01 and cur.count(" ")/float(len(cur))>0.01:
print "".join(cur)
print "Key: " + chr(i)
#Q4
print "Q4"
f=open("4.txt","r")
for line in f:
s=decodehex(line)
for i in range(20,120):
cur=map(chr,map(lambda x: x^i, s))
if sum(map(lambda x: x>=32 and x<=126, map(ord, cur)))/float(len(cur))>0.96:
if cur.count("t")+cur.count("T")>cur.count("p")+cur.count("P") and cur.count("e")+cur.count("E")>cur.count("z")+cur.count("Z") and cur.count("e")+cur.count("E")>cur.count("L")+cur.count("l"):
if cur.count("a")/float(len(cur))>0.03 and cur.count("e")/float(len(cur))>0.01 and cur.count(" ")/float(len(cur))>0.01:
print "".join(cur)
print "Key: " + str(chr(i)) + ", Line: " + line
#Q5
print "Q5"
s="Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal"
k="ICE"
out=repeatkeyxor(k,s)
print repeatkeyxor(k,s)
print hexstring2ascii(repeatkeyxor(k,hexstring2ascii(out)))
#Q6
out=""
f=open("6.txt","r")
for line in f:
out+=line.strip()
s=b642ascii(out)
ksd={}
for keysize in xrange(1,40):
numbytes=8*keysize
numchars=(1+(keysize/4))*4
c1=s[:keysize]
c2=s[keysize:2*keysize]
c3=s[2*keysize:3*keysize]
c4=s[3*keysize:4*keysize]
c5=s[4*keysize:5*keysize]
diff=mean([hamming(c1,c2)/float(keysize),hamming(c1,c3)/float(keysize),hamming(c2,c3)/float(keysize),hamming(c4,c5)/float(keysize),hamming(c2,c4)/float(keysize),hamming(c1,c5)/float(keysize)])
ksd[keysize]=diff
#From ksd we see keysize is 29 (not 5 or 2!)
s=b642ascii(out)
keysize=29
#split string to blocks
blocks=[]
for i in xrange((len(s)/keysize)+1):
if i!=len(s)/keysize:
blocks.append(s[i*keysize:(i+1)*keysize])
else:
if len(s[i*keysize:])>0:
blocks.append(s[i*keysize:])
#transpose blocks
newblocks=[]
for i in xrange(keysize):
newblocks.append([])
for block in blocks:
for j in xrange(len(block)):
newblocks[j].append(block[j])
key=[]
keyds=[]
for block in newblocks:
minscore=float("infinity")
bestc=None
keyd={}
for keyc in range(32,123):
decrypt=map(lambda x: chr(ord(x)^keyc),block)
score=comparehist(computehistogram(decrypt))
keyd[chr(keyc)]=score
#print score
if score<minscore:
minscore=score
bestc=chr(keyc)
key.append(bestc)
keyds.append(keyd)
print "Key: " + "".join(key)
#After fixing case:
key="Terminator X: Bring the noise"
#can we fix this automatically?
print decryptxor("".join(key),s)
#Q7
#OpenSSL example
#echo -n "0123456789abcdef0123456789abcdef" | openssl aes-128-ecb -nosalt -nopad -K "59454c4c4f57205355424d4152494e45" | xxd
key = b'YELLOW SUBMARINE'
cipher = AES.new(key, AES.MODE_ECB, "")
f=open("7.txt","r")
s=b""
for line in f:
s+=line.strip()
s=b642ascii(s)
f.close()
key = b'YELLOW SUBMARINE'
cipher = AES.new(key, AES.MODE_ECB, "ignoreIV")
msg = cipher.decrypt(s)
#print msg
#Q8
f=open("8.txt","r")
cps=[]
for line in f:
cps.append(line.strip())
f.close()
lenblock=32
simd={}
for z in xrange(len(cps)):
c=cps[z]
count=0
for i in xrange(len(c)/lenblock):
for j in xrange(i+1,(len(c)/lenblock)):
if c[i*lenblock:(i+1)*lenblock] == c[j*lenblock:(j+1)*lenblock]:
count+=1
simd[z]=count
sorted_x = sorted(simd.items(), key=operator.itemgetter(1), reverse=True) #here we see 132 has the most repeats (entirely repeats)
#print cps[132]
|
gpl-2.0
| -7,439,320,829,853,359,000 | 27.461765 | 444 | 0.521959 | false | 2.644712 | false | false | false |
gprakhar/scripts-biopython
|
hmmer-pipeline-DPM_Dicty.py
|
1
|
1173
|
#Script to run Hmmer on Dicty proteins
#Run it keeping in mind that HMM(s) from a particular DPM human homolog protein are all checked against every protein from Dicty
#Author : prakhar gaur
#date : Wed 16 July IST 2015
import os
import argparse
import csv
parser = argparse.ArgumentParser()
parser.add_argument('-C', '--numberofcores', help='Number of cores to run the blast on', type=int)
parser.add_argument('hmmfile', metavar='F', help='csv file with hmm entries, with first entry in each row as Uniprot id')
args = parser.parse_args()
cores = args.numberofcores
inputfileName = str(args.hmmfile)
hmmerExe = r'/home/interns/CHG_Nunjundiah-Project/local-bin/hmmer/hmmer-3.1b2-linux-intel-x86_64/binaries/'
pfamid = list()
with open(inputfileName) as inputfileHandle:
keywordString = csv.reader(inputfileHandle)
for row in keywordString:
pfamid.append(row)
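# Each CSV row: the first field is the Uniprot id of the human DPM homolog,
# the remaining fields are the HMM ids to search against the Dicty proteome.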
for idlist in pfamid:
for item in idlist[1:]:
hmmsearch_cmd = '%shmmsearch --cpu %d /home/interns/CHG_Nunjundiah-Project/raw-data/DPM-prot-HMM/%s.hmm dicty_primary_protein.fa >> %s.out' % (hmmerExe, cores, item, idlist[0])
print hmmsearch_cmd
os.system(hmmsearch_cmd)
|
gpl-3.0
| -1,187,990,765,733,363,000 | 38.1 | 178 | 0.736573 | false | 2.867971 | false | false | false |
ocelot-collab/ocelot
|
unit_tests/ebeam_test/dba_tracking/dba_tracking_conf.py
|
1
|
1026
|
"""Test parameters description"""
import pytest
from ocelot import *
"""lattice elements descripteion"""
Q1 = Quadrupole(l=0.4, k1=-1.3, eid="Q1")
Q2 = Quadrupole(l=0.8, k1=1.4, eid="Q2")
Q3 = Quadrupole(l=0.4, k1=-1.7, eid="Q3")
Q4 = Quadrupole(l=0.5, k1=1.19250444829, eid="Q4")
B = Bend(l=2.7, k1=-.06, angle=2*pi/16., e1=pi/16., e2=pi/16., eid= "B")
SF = Sextupole(l=0.01, k2=150.0, eid="SF") #random value
SD = Sextupole(l=0.01, k2=-150.0, eid="SD") #random value
D1 = Drift(l=2., eid= "D1")
D2 = Drift(l=0.6, eid= "D2")
D3 = Drift(l=0.3, eid= "D3")
D4 = Drift(l=0.7, eid= "D4")
D5 = Drift(l=0.9, eid= "D5")
D6 = Drift(l=0.2, eid= "D6")
"""pytest fixtures description"""
@pytest.fixture(scope='module')
def cell():
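# Palindromic arrangement around Q4: two bends enclosing SD/SF sextupole
# pairs, i.e. a DBA-style arc cell mirrored about its centre.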
return (D1, Q1, D2, Q2, D3, Q3, D4, B, D5, SD, D5, SF, D6, Q4, D6, SF, D5, SD,D5, B, D4, Q3, D3, Q2, D2, Q1, D1)
@pytest.fixture(scope='module')
def method():
return MethodTM()
@pytest.fixture(scope='module')
def lattice(cell, method):
return MagneticLattice(cell, method=method)
|
gpl-3.0
| 2,305,508,052,010,516,700 | 24.02439 | 116 | 0.61306 | false | 1.954286 | true | false | false |
kepstin/picard
|
picard/util/tags.py
|
1
|
3767
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2007 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
TAG_NAMES = {
'album': N_('Album'),
'artist': N_('Artist'),
'title': N_('Title'),
'date': N_('Date'),
'originaldate': N_('Original Release Date'),
'albumartist': N_('Album Artist'),
'tracknumber': N_('Track Number'),
'totaltracks': N_('Total Tracks'),
'discnumber': N_('Disc Number'),
'totaldiscs': N_('Total Discs'),
'albumartistsort': N_('Album Artist Sort Order'),
'artistsort': N_('Artist Sort Order'),
'titlesort': N_('Title Sort Order'),
'albumsort': N_('Album Sort Order'),
'asin': N_('ASIN'),
'grouping': N_('Grouping'),
'version': N_('Version'),
'isrc': N_('ISRC'),
'mood': N_('Mood'),
'bpm': N_('BPM'),
'copyright': N_('Copyright'),
'license': N_('License'),
'composer': N_('Composer'),
'writer': N_('Writer'),
'conductor': N_('Conductor'),
'lyricist': N_('Lyricist'),
'arranger': N_('Arranger'),
'producer': N_('Producer'),
'engineer': N_('Engineer'),
'subtitle': N_('Subtitle'),
'discsubtitle': N_('Disc Subtitle'),
'remixer': N_('Remixer'),
'musicbrainz_trackid': N_('MusicBrainz Recording Id'),
'musicbrainz_albumid': N_('MusicBrainz Release Id'),
'musicbrainz_artistid': N_('MusicBrainz Artist Id'),
'musicbrainz_albumartistid': N_('MusicBrainz Release Artist Id'),
'musicbrainz_workid': N_('MusicBrainz Work Id'),
'musicbrainz_releasegroupid': N_('MusicBrainz Release Group Id'),
'musicbrainz_discid': N_('MusicBrainz Disc Id'),
'musicbrainz_sortname': N_('MusicBrainz Sort Name'),
'musicip_puid': N_('MusicIP PUID'),
'musicip_fingerprint': N_('MusicIP Fingerprint'),
'acoustid_id': N_('AcoustID'),
'acoustid_fingerprint': N_('AcoustID Fingerprint'),
'discid': N_('Disc Id'),
'website': N_('Website'),
'compilation': N_('Compilation'),
'comment:': N_('Comment'),
'genre': N_('Genre'),
'encodedby': N_('Encoded By'),
'performer:': N_('Performer'),
'releasetype': N_('Release Type'),
'releasestatus': N_('Release Status'),
'releasecountry': N_('Release Country'),
'label': N_('Record Label'),
'barcode': N_('Barcode'),
'catalognumber': N_('Catalog Number'),
'format': N_('Format'),
'djmixer': N_('DJ-Mixer'),
'media': N_('Media'),
'lyrics:': N_('Lyrics'),
'mixer': N_('Mixer'),
'language': N_('Language'),
'script': N_('Script'),
'~length': N_('Length'),
'~rating': N_('Rating'),
}
def display_tag_name(name):
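# Tags like "performer:role" are split on the first ':' and rendered as
# "Performer [role]"; plain names fall back to the translated table entry.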
if ':' in name:
name, desc = name.split(':', 1)
name = _(TAG_NAMES.get(name + ':', name))
return '%s [%s]' % (_(name), desc)
else:
new_name = TAG_NAMES.get(name)
if new_name is None:
new_name = TAG_NAMES.get(name + ':')
if new_name is None:
return _(name)
else:
return '%s []' % (_(new_name),)
else:
return _(new_name)
|
gpl-2.0
| 5,036,923,362,161,006,000 | 35.192308 | 80 | 0.591923 | false | 3.403255 | false | false | false |
mjsauvinen/P4UL
|
pyLib/netcdfTools.py
|
1
|
11278
|
#!/usr/bin/env python3
import netCDF4 as nc
import sys
import argparse
import numpy as np
from utilities import partialMatchFromList
debug = True
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def asciiEncode(uList, uStr):
n = len(uList)
if(n > 0):
uList = list(uList) # This might be a tuple coming in
for i in range(len(uList)):
if isinstance(uList[i], bytes): uList[i] = uList[i].decode()
else:
print(' Dictionary {} has zero length. Exiting ...'.format(uStr))
sys.exit(1)
return uList
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def netcdfDataset(filename, verbose=True):
# Create Dataset
ds = nc.Dataset(filename)
# Generate a list of variables and independent variables contained in the file.
varList = asciiEncode(ds.variables.keys(), 'Variables')
dimList = asciiEncode(ds.dimensions.keys(), 'Dimensions')
if(verbose):
print(' Variable List : {} '.format(varList))
print(' Dimension List : {} '.format(dimList))
return ds, varList, dimList
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def netcdfOutputDataset(filename, mode='w'):
if( isinstance( filename, bytes ) ):
filename = filename.decode()
dso = nc.Dataset(filename, mode, format='NETCDF4')
return dso
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def netcdfWriteAndClose(dso, verbose=True):
if(verbose):
print('Writing of output data .... ')
dso.close()
if(verbose):
print(' ... done. File closed.')
dso = None
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read1DVariableFromDataset( dimStr, varStr, ds, iLOff=0, iROff=0, cl=1):
# iLOff: left offset
# iROff: right offset
# cl : coarsening level
if(varStr in ds.variables.keys()):
vs = ds.variables[varStr]
dimList = vs.dimensions # return a list of variable dimensions ('time', 'x', 'y', etc.)
print(' dimList = {} '.format( dimList ))
vdim = partialMatchFromList( dimStr, dimList )
try:
print(' Reading variable {} ... '.format(vdim))
if(iROff == 0 or (iROff is None) ):
var = ds.variables[vdim][(0 + iLOff):]
else:
var = ds.variables[vdim][(0 + iLOff):-abs(iROff)]
print(' ... done.')
except:
print(' Cannot read the array of variable: {}.'.format(varStr))
sys.exit(1)
else:
print(' Variable {} not in list {}.'.format(varStr, ds.variables.keys()))
sys.exit(1)
return var[::cl], np.shape(var[::cl])
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def readVariableFromDataset(varStr, ds, cl=1 ):
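# Dispatch on the variable's rank and on whether it is time-dependent; the
# coarsening stride cl is applied to spatial axes only, never to time.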
if( varStr in ds.variables.keys() ):
vdims = asciiEncode(ds.variables[varStr].dimensions, ' Variable dimensions ')
if( len(vdims) == 4 ):
var = ds.variables[varStr][:,::cl,::cl,::cl]
elif( len(vdims) == 3 and 'time' not in vdims ):
var = ds.variables[varStr][::cl,::cl,::cl]
elif( len(vdims) == 3 and 'time' in vdims ):
var = ds.variables[varStr][:,::cl,::cl]
elif( len(vdims) == 2 and 'time' not in vdims ):
var = ds.variables[varStr][::cl,::cl]
elif( len(vdims) == 2 and 'time' in vdims ):
print(' {} {} '.format(varStr, ds.variables[varStr][:].shape ))
var = ds.variables[varStr][:,::cl]
elif( len(vdims) == 1 and 'time' in vdims ):
var = ds.variables[varStr]
else:
var = ds.variables[varStr][::cl]
# Load the independent variables and wrap them into a dict
dDict = dict()
for dname in vdims:
dData = ds.variables[dname][:]
if( 'time' in dname ): dDict[dname] = dData
else: dDict[dname] = dData[::cl]
dData = None
else:
sys.exit(' Variable {} not in list {}.'.format(varStr, ds.variables.keys()))
return var, dDict
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read3DVariableFromDataset(varStr, ds, iTOff=0, iLOff=0, iROff=0, cl=1, meanOn=False):
# iLOff: left offset
# iROff: right offset
# cl : coarsening level
varStr = partialMatchFromList( varStr , ds.variables.keys() )
print(' Reading variable {} ... '.format(varStr))
var, dDict = readVariableFromDataset(varStr, ds, cl=cl )
print(' ... done.')
iL = 0 + int(iLOff/cl)
iR = int(abs(iROff/cl))
iT = 0 + int(iTOff)
if(iR == 0):
# Param list (time, z, y, x )
if(meanOn):
vo = var[iL:, iL:, iL:]
else:
vo = var[iT:, iL:, iL:, iL:]
else:
if(meanOn):
vo = var[iL:-iR, iL:-iR, iL:-iR]
else:
vo = var[iT:, iL:-iR, iL:-iR, iL:-iR]
var = None
return vo, np.array(vo.shape)
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read3dDataFromNetCDF( fname, varStr, cl=1, zeroNans=True ):
'''
Establish two boolean variables which indicate whether the created variable is an
independent or dependent variable in function createNetcdfVariable().
'''
parameter = True; variable = False
'''
Create a NETCDF input dataset (ds), and its associated lists of dependent (varList)
and independent (dimList) variables.
'''
ds, varList, paramList = netcdfDataset(fname)
varStr = partialMatchFromList( varStr , varList )
print(' Extracting {} from dataset in {} ... '.format( varStr, fname ))
var, dDict = readVariableFromDataset(varStr, ds, cl )
print(' {}_dims = {}\n Done!'.format(varStr, var.shape ))
# Rename the keys in dDict to simplify the future postprocessing
for dn in dDict.keys():
if( zeroNans ):
idNan = np.isnan(dDict[dn]); dDict[dn][idNan] = 0.
if( 'time' in dn and 'time' != dn ):
dDict['time'] = dDict.pop( dn )
elif( 'x' == dn[0] and 'x' != dn ):
dDict['x'] = dDict.pop( dn )
elif( 'y' == dn[0] and 'y' != dn ):
dDict['y'] = dDict.pop( dn )
elif( 'z' == dn[0] and 'z' != dn ):
dDict['z'] = dDict.pop( dn )
else: pass
# Append the variable into the dict.
dDict['v'] = var
return dDict
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def interpolatePalmVectors(v0, vc_dims, cmpStr, meanOn=False):
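# PALM stores u, v, w on a staggered (Arakawa C) grid; average adjacent values
# along the chosen direction ('i', 'j' or 'k') to obtain cell-centered
# components, or copy along k ('kc') without interpolating.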
icmp = int()
iOn = False
jOn = False
kOn = False
kCopy = False
if(cmpStr == 'i'):
icmp = 3
iOn = True
elif(cmpStr == 'j'):
icmp = 2
jOn = True
elif(cmpStr == 'k'):
icmp = 1
kOn = True
elif(cmpStr == 'kc'):
icmp = 1
kCopy = True
else:
print('Invalid component string: {}. Exiting ...'.format(cmpStr))
sys.exit(1)
vc = np.zeros(vc_dims)
if(meanOn):
vm = np.zeros(vc_dims[1:])
else:
vm = np.array([]) # Empty array.
# Create index arrays for interpolation.
jl = np.arange(0, vc_dims[icmp]); jr = jl + 1 # x,y,z: left < right
nTo, nzo, nyo, nxo = np.shape(v0)
nTimes, nz, ny, nx = vc_dims
if( nz == nzo ): k1 = 0
else: k1 = 1
for i in range(nTimes):
tmp0 = v0[i, :, :, :].copy()
if(iOn):
tmp1 = (tmp0[:, :, jl] + tmp0[:, :, jr]) * 0.5; tmp0 = None
tmp2 = tmp1[k1:, 0:-1, :]
if(jOn):
tmp1 = (tmp0[:, jl, :] + tmp0[:, jr, :]) * 0.5; tmp0 = None
tmp2 = tmp1[k1:, :, 0:-1]
if(kOn):
tmp1 = (tmp0[jl, :, :] + tmp0[jr, :, :]) * 0.5; tmp0 = None
tmp2 = tmp1[:, 0:-1, 0:-1]
if( kCopy ):
tmp1 = tmp0[jl, :, :]; tmp0 = None
tmp2 = tmp1[:, 0:-1, 0:-1]
tmp1 = None
vc[i, :, :, :] = tmp2
if(meanOn):
vm += tmp2.copy()
# Clear memory.
tmp0 = None
tmp1 = None
tmp2 = None
if(meanOn):
vm /= float(nTimes)
print(' Interpolation along the {}^th direction completed.'.format(cmpStr))
return vc, vm # vm is empty if meanOn=False.
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def vectorPrimeComponent(vc, vm):
vc_dims = np.shape(vc)
vp = np.zeros(np.shape(vc))
nTimes = vc_dims[0]
print(' Computing primes for {} times ... '.format(nTimes))
for i in range(nTimes):
vp[i, :, :, :] = vc[i, :, :, :] - vm[:, :, :]
print(' ... done.')
return vp
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def createNetcdfVariable(dso, v, vName, vLen, vUnits, vType, vTuple, parameter, zlib=False, fill_value=None,verbose=True):
if(parameter):
dso.createDimension(vName, vLen)
var = dso.createVariable(vName, vType, vTuple, zlib=zlib, fill_value=fill_value)
var.units = vUnits
var[:] = v
v = None
if(parameter):
pStr = 'parameter'
else:
pStr = 'variable'
if(verbose):
print(' NetCDF {} {} successfully created. '.format(pStr, vName))
return var
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def createCoordinateAxis(dso, Rdims, Rdpx, axis, varname, formatstr, unit, parameter, zlib=False, verbose=True, offset=0.0):
arr = np.empty(Rdims[axis])
for i in range(Rdims[axis]):
# dpx is in [N,E], see getGeoTransform() in gdalTools.py
arr[i] = np.maximum(0.0, i + offset) * Rdpx[axis]
axvar = createNetcdfVariable( \
dso, arr, varname, len(arr), unit, formatstr, (varname,), parameter, zlib, verbose=verbose )
arr = None
return axvar
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def fillTopographyArray(Rtopo, Rdims, Rdpx, datatype):
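# Turn the 2D terrain height raster into a 3D 0/1 solid mask: in each (x,y)
# column, cells from the ground up to the local terrain height are set to 1.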
topodims = np.array([Rdims[2], Rdims[0], Rdims[1]])
topo = np.zeros(topodims, dtype=datatype)
print(' \n Filling 3D array from topography data...')
print(' Dimensions [z,y,x]: [{}, {}, {}]'.format(*topodims))
print(' Total number of data points: {}'.format(np.prod(topodims)))
for x in range(Rdims[1]):
for y in range(Rdims[0]):
# Reverse the y-axis because of the top-left origo in raster
maxind = int(round(Rtopo[-y - 1][x] / Rdpx[2]))+1
if(maxind>1):
topo[0:maxind, y, x] = 1
print(' ...done. \n')
return topo
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read3dDictVarFromNetCDF( fname, nameDict, cl=1 ):
'''
Establish two boolean variables which indicate whether the created variable is an
independent or dependent variable in function createNetcdfVariable().
'''
parameter = True; variable = False
'''
Create a NETCDF input dataset (ds), and its associated lists of dependent (varList)
and independent (dimList) variables.
'''
ds, varList, paramList = netcdfDataset(fname)
'''
Read cell center coordinates and time.
Create the output independent variables right away and empty memory.
'''
time, time_dims = read1DVariableFromDataset('time', nameDict['varname'], ds, 0, 0, 1 ) # All values.
x, x_dims = read1DVariableFromDataset(nameDict['xname'], nameDict['varname'], ds, 0, 0, cl )
y, y_dims = read1DVariableFromDataset(nameDict['yname'], nameDict['varname'], ds, 0, 0, cl )
z, z_dims = read1DVariableFromDataset(nameDict['zname'], nameDict['varname'], ds, 0, 0, cl )
x[np.isnan(x)] = 0. # Clear away NaNs
y[np.isnan(y)] = 0. #
z[np.isnan(z)] = 0. #
'''
Read in the velocity components.
PALM netCDF4:
u(time, zu_3d, y, xu)
v(time, zu_3d, yv, x)
w(time, zw_3d, y, x)
'''
print(' Extracting {} from dataset ... '.format( nameDict['varname'] ))
v, v_dims = read3DVariableFromDataset(nameDict['varname'], ds, 0, 0, 0, cl) # All values.
print(' {}_dims = {}\n Done!'.format(nameDict['varname'], v_dims ))
dataDict = dict()
dataDict['v'] = v
dataDict['x'] = x
dataDict['y'] = y
dataDict['z'] = z
dataDict['time'] = time
return dataDict
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
|
mit
| -815,513,492,057,849,600 | 27.917949 | 124 | 0.561092 | false | 2.617313 | false | false | false |
clemsos/mitras
|
tests/test_clustering.py
|
1
|
1207
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import numpy as np
from test_helpers import TestHelpers
helpers=TestHelpers()
helpers.add_relative_path()
from lib.api import Similarity_API
from lib.clusters import get_linkage_matrix
from time import time
# from scipy.cluster.hierarchy import linkage, dendrogram, leaves_list
import fastcluster
t0=time()
path="/home/clemsos/Dev/mitras/data/tmp"
chunk_size=2500 # cut the whole dataset into chunks so it can be processed
protomemes_count= 43959#db["hashtags"].count()
api=Similarity_API(path,protomemes_count,chunk_size)
print
sims=api.get_similarity_matrix()
print sims.shape
similarity_treshold = 0.7 # minimum value of similarity between protomemes
similar_protomemes_treshold=20
print 'getting rows with %d protomemes that are at least %.3fx similar'%(similar_protomemes_treshold,similarity_treshold)
# get row numbers
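# (sims > threshold).sum(axis=1) counts, per row, how many other protomemes
# clear the similarity threshold; keep the rows where that count is high enough.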
remarquable_rows=np.where((sims > similarity_treshold).sum(axis=1) >= similar_protomemes_treshold)[0]
# print type(remarquable_rows)
print "%d memes found"%len(remarquable_rows)
print remarquable_rows
# get memes data
print " done in %.3fs"%(time()-t0)
print
print
print " done in %.3fs"%(time()-t0)
|
mit
| -5,519,701,892,024,718,000 | 23.16 | 121 | 0.763877 | false | 3.055696 | false | true | false |
nafitzgerald/allennlp
|
allennlp/modules/alternating_highway_lstm.py
|
1
|
13051
|
from typing import Tuple
from overrides import overrides
import torch
from torch.autograd import Function, Variable
from torch.nn import Parameter
from torch.nn.utils.rnn import PackedSequence, pad_packed_sequence, pack_padded_sequence
from allennlp.nn.initializers import block_orthogonal
from allennlp.custom_extensions._ext import highway_lstm_layer
class _AlternatingHighwayLSTMFunction(Function):
def __init__(self, input_size: int, hidden_size: int, num_layers: int, train: bool) -> None:
super(_AlternatingHighwayLSTMFunction, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layers = num_layers
self.train = train
@overrides
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor,
weight: torch.Tensor,
bias: torch.Tensor,
state_accumulator: torch.Tensor,
memory_accumulator: torch.Tensor,
dropout_mask: torch.Tensor,
lengths: torch.Tensor,
gates: torch.Tensor) -> Tuple[torch.Tensor, None]:
sequence_length, batch_size, input_size = inputs.size()
tmp_i = inputs.new(batch_size, 6 * self.hidden_size)
tmp_h = inputs.new(batch_size, 5 * self.hidden_size)
is_training = 1 if self.train else 0
highway_lstm_layer.highway_lstm_forward_cuda(input_size, # type: ignore # pylint: disable=no-member
self.hidden_size,
batch_size,
self.num_layers,
sequence_length,
inputs,
lengths,
state_accumulator,
memory_accumulator,
tmp_i,
tmp_h,
weight,
bias,
dropout_mask,
gates,
is_training)
self.save_for_backward(inputs, lengths, weight, bias, state_accumulator,
memory_accumulator, dropout_mask, gates)
# The state_accumulator has shape: (num_layers, sequence_length + 1, batch_size, hidden_size)
# so for the output, we want the last layer and all but the first timestep, which was the
# initial state.
output = state_accumulator[-1, 1:, :, :]
return output, state_accumulator[:, 1:, :, :]
@overrides
def backward(self, grad_output, grad_hy): # pylint: disable=arguments-differ
(inputs, lengths, weight, bias, state_accumulator, # pylint: disable=unpacking-non-sequence
memory_accumulator, dropout_mask, gates) = self.saved_tensors
inputs = inputs.contiguous()
sequence_length, batch_size, input_size = inputs.size()
parameters_need_grad = 1 if self.needs_input_grad[1] else 0 # pylint: disable=unsubscriptable-object
grad_input = inputs.new().resize_as_(inputs).zero_()
grad_state_accumulator = inputs.new().resize_as_(state_accumulator).zero_()
grad_memory_accumulator = inputs.new().resize_as_(memory_accumulator).zero_()
grad_weight = inputs.new()
grad_bias = inputs.new()
grad_dropout = None
grad_lengths = None
grad_gates = None
if parameters_need_grad:
grad_weight.resize_as_(weight).zero_()
grad_bias.resize_as_(bias).zero_()
tmp_i_gates_grad = inputs.new().resize_(batch_size, 6 * self.hidden_size).zero_()
tmp_h_gates_grad = inputs.new().resize_(batch_size, 5 * self.hidden_size).zero_()
is_training = 1 if self.train else 0
highway_lstm_layer.highway_lstm_backward_cuda(input_size, # pylint: disable=no-member
self.hidden_size,
batch_size,
self.num_layers,
sequence_length,
grad_output,
lengths,
grad_state_accumulator,
grad_memory_accumulator,
inputs,
state_accumulator,
memory_accumulator,
weight,
gates,
dropout_mask,
tmp_h_gates_grad,
tmp_i_gates_grad,
grad_hy,
grad_input,
grad_weight,
grad_bias,
is_training,
parameters_need_grad)
return (grad_input, grad_weight, grad_bias, grad_state_accumulator,
grad_memory_accumulator, grad_dropout, grad_lengths, grad_gates)
class AlternatingHighwayLSTM(torch.nn.Module):
"""
A stacked LSTM with LSTM layers which alternate between going forwards over
the sequence and going backwards, with highway connections between each of
the alternating layers. This implementation is based on the description in
`Deep Semantic Role Labelling - What works and what's next
<https://homes.cs.washington.edu/~luheng/files/acl2017_hllz.pdf>`_ .
Parameters
----------
input_size : int, required
The dimension of the inputs to the LSTM.
hidden_size : int, required
The dimension of the outputs of the LSTM.
num_layers : int, required
The number of stacked LSTMs to use.
recurrent_dropout_probability: float, optional (default = 0.0)
The dropout probability to be used in a dropout scheme as stated in
`A Theoretically Grounded Application of Dropout in Recurrent Neural Networks
<https://arxiv.org/abs/1512.05287>`_ .
Returns
-------
output : PackedSequence
The outputs of the interleaved LSTMs per timestep. A tensor of shape
(batch_size, max_timesteps, hidden_size) where for a given batch
element, all outputs past the sequence length for that batch are
zero tensors.
"""
def __init__(self,
input_size: int,
hidden_size: int,
num_layers: int = 1,
recurrent_dropout_probability: float = 0) -> None:
super(AlternatingHighwayLSTM, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layers = num_layers
self.recurrent_dropout_probability = recurrent_dropout_probability
self.training = True
# Input dimensions consider the fact that we do
# all of the LSTM projections (and highway parts)
# in a single matrix multiplication.
input_projection_size = 6 * hidden_size
state_projection_size = 5 * hidden_size
bias_size = 5 * hidden_size
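# Presumably 4 LSTM gates plus highway gate plus a highway carry of the input
# for the input projection (6x), and 4 gates plus highway gate for the
# recurrent one (5x); the exact layout is fixed by the fused CUDA kernel, so
# treat this breakdown as an assumption.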
# Here we are creating a single weight and bias with the
# parameters for all layers unfolded into it. This is necessary
# because unpacking and re-packing the weights inside the
# kernel would be slow, as it would happen every time it is called.
total_weight_size = 0
total_bias_size = 0
for layer in range(num_layers):
layer_input_size = input_size if layer == 0 else hidden_size
input_weights = input_projection_size * layer_input_size
state_weights = state_projection_size * hidden_size
total_weight_size += input_weights + state_weights
total_bias_size += bias_size
self.weight = Parameter(torch.FloatTensor(total_weight_size))
self.bias = Parameter(torch.FloatTensor(total_bias_size))
self.reset_parameters()
def reset_parameters(self) -> None:
self.bias.data.zero_()
weight_index = 0
bias_index = 0
for i in range(self.num_layers):
input_size = self.input_size if i == 0 else self.hidden_size
# Create a tensor of the right size and initialize it.
init_tensor = self.weight.data.new(input_size, self.hidden_size * 6).zero_()
block_orthogonal(init_tensor, [input_size, self.hidden_size])
# Copy it into the flat weight.
self.weight.data[weight_index: weight_index + init_tensor.nelement()]\
.view_as(init_tensor).copy_(init_tensor)
weight_index += init_tensor.nelement()
# Same for the recurrent connection weight.
init_tensor = self.weight.data.new(self.hidden_size, self.hidden_size * 5).zero_()
block_orthogonal(init_tensor, [self.hidden_size, self.hidden_size])
self.weight.data[weight_index: weight_index + init_tensor.nelement()]\
.view_as(init_tensor).copy_(init_tensor)
weight_index += init_tensor.nelement()
# Set the forget bias to 1.
self.bias.data[bias_index + self.hidden_size:bias_index + 2 * self.hidden_size].fill_(1)
bias_index += 5 * self.hidden_size
def forward(self, inputs: PackedSequence, # pylint: disable=arguments-differ
# pylint: disable=unused-argument
initial_state: torch.Tensor = None)-> Tuple[PackedSequence, torch.Tensor]:
"""
Parameters
----------
inputs : ``PackedSequence``, required.
A batch first ``PackedSequence`` to run the stacked LSTM over.
initial_state : Tuple[torch.Tensor, torch.Tensor], optional, (default = None)
Currently, this is ignored.
Returns
-------
output_sequence : ``PackedSequence``
The encoded sequence of shape (batch_size, sequence_length, hidden_size)
final_states: ``torch.Tensor``
The per-layer final (state, memory) states of the LSTM, each with shape
(num_layers, batch_size, hidden_size).
"""
inputs, lengths = pad_packed_sequence(inputs, batch_first=True)
# Kernel takes sequence length first tensors.
inputs = inputs.transpose(0, 1)
sequence_length, batch_size, _ = inputs.size()
accumulator_shape = [self.num_layers, sequence_length + 1, batch_size, self.hidden_size]
state_accumulator = Variable(inputs.data.new(*accumulator_shape).zero_(), requires_grad=False)
memory_accumulator = Variable(inputs.data.new(*accumulator_shape).zero_(), requires_grad=False)
dropout_weights = inputs.data.new().resize_(self.num_layers, batch_size, self.hidden_size).fill_(1.0)
if self.training:
# Normalize by 1 - dropout_prob to preserve the output statistics of the layer.
dropout_weights.bernoulli_(1 - self.recurrent_dropout_probability)\
.div_((1 - self.recurrent_dropout_probability))
dropout_weights = Variable(dropout_weights, requires_grad=False)
gates = Variable(inputs.data.new().resize_(self.num_layers,
sequence_length,
batch_size, 6 * self.hidden_size))
lengths_variable = Variable(torch.IntTensor(lengths))
implementation = _AlternatingHighwayLSTMFunction(self.input_size,
self.hidden_size,
num_layers=self.num_layers,
train=self.training)
output, _ = implementation(inputs, self.weight, self.bias, state_accumulator,
memory_accumulator, dropout_weights, lengths_variable, gates)
# TODO(Mark): Also return the state here by using index_select with the lengths so we can use
# it as a Seq2VecEncoder.
output = output.transpose(0, 1)
output = pack_padded_sequence(output, lengths, batch_first=True)
return output, None
|
apache-2.0
| -5,288,858,517,103,349,000 | 49.003831 | 109 | 0.534365 | false | 4.740647 | false | false | false |
felgari/k2
|
report.py
|
1
|
10700
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Felipe Gallego. All rights reserved.
#
# This is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Script to report results.
"""
import sys
import os
import csv
import numpy as np
from ctes import *
from avpos import AvPos
from resdifpos import ResDiffPos
from aptrend import ApTrend
from kfiles import read_input_file, read_res_file
from utils import get_matchings
def report_file_name(index):
return REP_OUT_FILE_PREFIX + index + REP_OUT_FILE_EXT
def do_report(index, k_data, cl, b1_res, a2_res, b1_per, a2_per, extd,
pre_rf = None, sco_rf = None, pre_df = None, sco_df = None):
print("Generating report ...")
rep_ap = []
trend_1 = []
trend_2 = []
the_trend = []
out_file_name = os.path.join(DATA_PATH, report_file_name(index))
avp = AvPos()
avp.calculate()
rdp = ResDiffPos(cl)
rdp.calculate()
aptr = ApTrend()
print("Saving to file: %s" % out_file_name)
try:
with open(out_file_name, 'w') as f:
idx = 0
for k_elt in k_data:
k_name_1 = k_elt[K_NAME_1_COL]
k_name_2 = k_elt[K_NAME_2_COL]
if k_name_1 != K_UNKNOWN_NAME and k_name_2 != K_UNKNOWN_NAME:
data = b1_res
elt_type = TYPE_1_COL
cl_1 = cl.b1_data(k_name_1)
cl_2 = cl.b1_data(k_name_2)
per = b1_per
if not len(cl_1):
data = a2_res
elt_type = TYPE_2_COL
cl_1 = cl.a2_data(k_name_1)
cl_2 = cl.a2_data(k_name_2)
per = a2_per
mat1, val_trend1 = get_matchings(k_name_1, data, True)
mat2, val_trend2 = get_matchings(k_name_2, data, False)
trend_1.append(val_trend1)
trend_2.append(val_trend2)
f.write("%s\n" % GEN_SEP)
f.write("-> %s (%s) - %s (%s)\n" % \
(k_name_1, cl_1[CL_POS_COL], k_name_2, cl_2[CL_POS_COL]))
f.write("Ext %s\n" % extd.mean[idx])
dif_pos = cl_1[CL_POS_COL] - cl_2[CL_POS_COL]
f.write("Dif: %d\n" % (dif_pos))
sum_ran = None
for i in range(dif_pos - DIF_RANGE, dif_pos + DIF_RANGE + 1):
try:
if sum_ran:
sum_ran = [ sum_ran[j] + per[i][j] for j in range(len(sum_ran))]
else:
sum_ran = per[i]
f.write("%d %s\n" % (i, per[i]))
except KeyError:
f.write("%d No disp\n" % i)
dif = cl_1[CL_POS_COL] - cl_2[CL_POS_COL]
trend = rdp.trend(cl_1[CL_POS_COL], cl_2[CL_POS_COL], elt_type)
the_trend.append(trend)
f.write("Sm %s -> %s \n" % (sum_ran, trend))
name_1_trend = avp.trend(k_name_1)
name_2_trend = avp.trend(k_name_2)
avg_1 = np.mean(avp.avpos(k_name_1)[-LAST_POS:])
if avg_1 > avp.avpos(k_name_1)[-1]:
name_1_curr = AVPOS_TREND_DOWN
else:
name_1_curr = AVPOS_TREND_UP
avg_2 = np.mean(avp.avpos(k_name_2)[-LAST_POS:])
if avg_2 > avp.avpos(k_name_2)[-1]:
name_2_curr = AVPOS_TREND_DOWN
else:
name_2_curr = AVPOS_TREND_UP
f.write("Pos. %s: %s\n(AVG: %d) - Current %s - Trend %s\n" % \
(k_name_1, avp.avpos(k_name_1),
avg_1, name_1_curr, name_1_trend))
f.write("Pos. %s: %s\n(AVG: %d) - Current %s - Trend %s\n" % \
(k_name_2, avp.avpos(k_name_2),
avg_2, name_2_curr, name_2_trend))
if len(trend) > 0:
ap_t = aptr.calculate_ap(trend, name_1_trend,
name_2_trend, int(cl_1[CL_POS_COL]),
int(cl_2[CL_POS_COL]))
rep_ap.append(ap_t)
f.write("Ap trend: %s -> %s %s\n" % \
(ap_t, val_trend1, val_trend2))
else:
rep_ap.append(TREND_IG)
if pre_rf and sco_rf:
f.write("Pre RF (%.1f): %s\n" % (sco_rf[idx], pre_rf[idx]))
if pre_df and sco_df:
f.write("Pre DF (%.1f): %s\n" % (sco_df[idx], pre_df[idx]))
f.write("%s\n" % FIRST_SEP)
the_sco = []
for m in mat1:
if elt_type == TYPE_1_COL:
mat_cl = cl.b1_data(NAMES_CONVERT[m[MAT_NAME_2_COL]])
else:
mat_cl = cl.a2_data(NAMES_CONVERT[m[MAT_NAME_2_COL]])
m[MAT_RES_COL] = CHR_TO_RES[m[MAT_RES_COL]]
the_sco.append(int(m[-1][:m[-1].find('-')]))
the_mark = ''
if m[2] == MAX_IS_FIRST:
if mat_cl[CL_POS_COL] + DIFF_POS_THIRD < cl_1[CL_POS_COL]:
the_mark = THE_MARK
elif m[2] == MAX_IS_SECOND:
if cl_1[CL_POS_COL] + DIFF_POS_SECOND < mat_cl[CL_POS_COL]:
the_mark = THE_MARK
elif cl_1[CL_POS_COL] + DIFF_POS_THIRD < mat_cl[CL_POS_COL]:
the_mark = THE_MARK
if mat_cl[CL_POS_COL] < cl_2[CL_POS_COL] + REF_LEVEL:
the_ref = "%s [%s] " % (THE_REF, m[2])
else:
the_ref = ''
f.write("%s (%s) %s %s\n" % (m, mat_cl[CL_POS_COL],
the_ref ,the_mark))
if not len(the_sco):
print(m[MAT_NAME_2_COL])
print(elt_type)
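# Trimmed mean: discard the single highest and lowest scores before averaging.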
the_sco.remove(max(the_sco))
the_sco.remove(min(the_sco))
f.write("%s\n" % SECOND_SEP)
the_sco2 = []
for m in mat2:
if elt_type == TYPE_1_COL:
mat_cl = cl.b1_data(NAMES_CONVERT[m[MAT_NAME_1_COL]])
else:
mat_cl = cl.a2_data(NAMES_CONVERT[m[MAT_NAME_1_COL]])
m[MAT_RES_COL] = CHR_TO_RES[m[MAT_RES_COL]]
the_sco2.append(int(m[-1][m[-1].find('-')+1:]))
the_mark = ''
if m[2] == MAX_IS_FIRST:
if cl_2[CL_POS_COL] + DIFF_POS_THIRD < mat_cl[CL_POS_COL]:
the_mark = THE_MARK
elif m[2] == MAX_IS_SECOND:
if mat_cl[CL_POS_COL] + DIFF_POS_SECOND < cl_2[CL_POS_COL]:
the_mark = THE_MARK
elif mat_cl[CL_POS_COL] + DIFF_POS_THIRD < cl_2[CL_POS_COL]:
the_mark = THE_MARK
if mat_cl[CL_POS_COL] < cl_1[CL_POS_COL] + REF_LEVEL:
the_ref = "%s [%s] " % (THE_REF, m[2])
else:
the_ref = ''
f.write("%s (%s) %s %s\n" % (m, mat_cl[CL_POS_COL],
the_ref ,the_mark))
f.write("%s\n" % SECOND_SEP)
the_sco2.remove(max(the_sco2))
the_sco2.remove(min(the_sco2))
f.write("%0.1f - %0.1f\n" % (np.mean(the_sco), np.mean(the_sco2)))
else:
trend_1.append(TREND_IG)
trend_2.append(TREND_IG)
rep_ap.append(TREND_IG)
idx += 1
aptr.write_data(index)
except IOError as ioe:
print("IOError saving file: '%s'" % out_file_name)
except KeyError as ke:
print("KeyError saving file: '%s'" % out_file_name)
except IndexError as ie:
print("IndexError saving file: '%s'" % out_file_name)
return rep_ap, trend_1, trend_2, the_trend
def report_generated(index):
return os.path.exists(os.path.join(DATA_PATH, report_file_name(index)))
if __name__ == "__main__":
if len(sys.argv) == NUM_ARGS:
sys.exit(do_report(sys.argv[1]))
else:
print("The index is needed as argument.")
|
gpl-3.0
| -4,561,189,203,595,888,600 | 38.487085 | 96 | 0.38028 | false | 3.741259 | false | false | false |
nvbn/coviolations_web
|
projects/forms.py
|
1
|
1067
|
from pymongo import DESCENDING
from django import forms
from tasks.models import Tasks
from tasks.exceptions import TaskDoesNotExists
from .models import Project
class FindTaskForBadgeForm(forms.Form):
"""Find task for badge form"""
project = forms.ModelChoiceField(
Project.objects.all(), required=True, to_field_name='name',
)
commit = forms.CharField(required=False)
branch = forms.CharField(required=False)
def get_task(self):
"""Get task"""
filter_spec = {
'project': self.cleaned_data['project'].name,
}
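# Narrow by commit hash and/or branch when given; the newest matching task
# wins thanks to the descending sort on 'created' below.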
if self.cleaned_data.get('commit'):
filter_spec['commit.hash'] = self.cleaned_data['commit']
if self.cleaned_data.get('branch'):
filter_spec['commit.branch'] = self.cleaned_data['branch']
task = Tasks.find_one(
filter_spec, sort=[('created', DESCENDING)], fields={
'status': True,
},
)
if task:
return task
else:
raise TaskDoesNotExists(filter_spec)
|
mit
| -7,728,021,554,287,230,000 | 31.333333 | 70 | 0.605436 | false | 4.167969 | false | false | false |
aliceinwire/virt-manager
|
virtManager/inspection.py
|
1
|
9120
|
#
# Copyright (C) 2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
from Queue import Queue, Empty
from threading import Thread
import logging
import os
from guestfs import GuestFS # pylint: disable=F0401
from virtManager.baseclass import vmmGObject
from virtManager.domain import vmmInspectionData
class vmmInspection(vmmGObject):
# Can't find a way to make Thread release our reference
_leak_check = False
def __init__(self):
vmmGObject.__init__(self)
self._thread = Thread(name="inspection thread", target=self._run)
self._thread.daemon = True
self._wait = 5 * 1000 # 5 seconds
self._q = Queue()
self._conns = {}
self._vmseen = {}
self._cached_data = {}
def _cleanup(self):
self._thread = None
self._q = Queue()
self._conns = {}
self._vmseen = {}
self._cached_data = {}
# Called by the main thread whenever a connection is added or
# removed. We tell the inspection thread, so it can track
# connections.
def conn_added(self, engine_ignore, conn):
obj = ("conn_added", conn)
self._q.put(obj)
def conn_removed(self, engine_ignore, uri):
obj = ("conn_removed", uri)
self._q.put(obj)
# Called by the main thread whenever a VM is added to vmlist.
def vm_added(self, conn, uuid):
ignore = conn
ignore = uuid
obj = ("vm_added")
self._q.put(obj)
def start(self):
# Wait a few seconds before we do anything. This prevents
# inspection from being a burden for initial virt-manager
# interactivity (although it shouldn't affect interactivity at
# all).
def cb():
self._thread.start()
return 0
logging.debug("waiting")
self.timeout_add(self._wait, cb)
def _run(self):
while True:
self._process_queue()
self._process_vms()
# Process everything on the queue. If the queue is empty when
# called, block.
def _process_queue(self):
first_obj = self._q.get()
self._process_queue_item(first_obj)
self._q.task_done()
try:
while True:
obj = self._q.get(False)
self._process_queue_item(obj)
self._q.task_done()
except Empty:
pass
def _process_queue_item(self, obj):
if obj[0] == "conn_added":
conn = obj[1]
if conn and not (conn.is_remote()):
uri = conn.get_uri()
self._conns[uri] = conn
conn.connect("vm-added", self.vm_added)
elif obj[0] == "conn_removed":
uri = obj[1]
del self._conns[uri]
elif obj[0] == "vm_added":
# Nothing - just a signal for the inspection thread to wake up.
pass
# Any VMs we've not seen yet? If so, process them.
def _process_vms(self):
for conn in self._conns.itervalues():
for vmuuid in conn.list_vm_uuids():
if not conn.is_active():
break
prettyvm = vmuuid
try:
vm = conn.get_vm(vmuuid)
prettyvm = conn.get_uri() + ":" + vm.get_name()
if vmuuid in self._vmseen:
data = self._cached_data.get(vmuuid)
if not data:
continue
if vm.inspection != data:
logging.debug("Found cached data for %s", prettyvm)
self._set_vm_inspection_data(vm, data)
continue
# Whether success or failure, we've "seen" this VM now.
self._vmseen[vmuuid] = True
self._process(conn, vm, vmuuid)
except:
logging.exception("%s: exception while processing",
prettyvm)
def _process(self, conn, vm, vmuuid):
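# Inspect one guest with libguestfs: attach its disks read-only, detect the
# OS on the first root filesystem, then collect hostname, icon and app data.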
g = GuestFS()
prettyvm = conn.get_uri() + ":" + vm.get_name()
ignore = vmuuid
disks = []
for disk in vm.get_disk_devices():
if (disk.path and
(disk.type == "block" or disk.type == "file") and
not disk.device == "cdrom"):
disks.append(disk)
if not disks:
logging.debug("%s: nothing to inspect", prettyvm)
return
# Add the disks. Note they *must* be added with readonly flag set.
for disk in disks:
path = disk.path
driver_type = disk.driver_type
if not (os.path.exists(path) and os.access(path, os.R_OK)):
logging.debug("%s: cannot access '%s', skipping inspection",
prettyvm, path)
return
g.add_drive_opts(path, readonly=1, format=driver_type)
g.launch()
# Inspect the operating system.
roots = g.inspect_os()
if len(roots) == 0:
logging.debug("%s: no operating systems found", prettyvm)
return
# Arbitrarily pick the first root device.
root = roots[0]
# Inspection results.
typ = g.inspect_get_type(root) # eg. "linux"
distro = g.inspect_get_distro(root) # eg. "fedora"
major_version = g.inspect_get_major_version(root) # eg. 14
minor_version = g.inspect_get_minor_version(root) # eg. 0
hostname = g.inspect_get_hostname(root) # string
product_name = g.inspect_get_product_name(root) # string
product_variant = g.inspect_get_product_variant(root) # string
# For inspect_list_applications and inspect_get_icon we
# require that the guest filesystems are mounted. However
# don't fail if this is not possible (I'm looking at you,
# FreeBSD).
filesystems_mounted = False
try:
# Mount up the disks, like guestfish --ro -i.
# Sort keys by length, shortest first, so that we end up
# mounting the filesystems in the correct order.
mps = list(g.inspect_get_mountpoints(root))
def compare(a, b):
if len(a[0]) > len(b[0]):
return 1
elif len(a[0]) == len(b[0]):
return 0
else:
return -1
mps.sort(compare)
for mp_dev in mps:
try:
g.mount_ro(mp_dev[1], mp_dev[0])
except:
logging.exception("%s: exception mounting %s on %s "
"(ignored)",
prettyvm, mp_dev[1], mp_dev[0])
filesystems_mounted = True
except:
logging.exception("%s: exception while mounting disks (ignored)",
prettyvm)
icon = None
apps = None
if filesystems_mounted:
# string containing PNG data
icon = g.inspect_get_icon(root, favicon=0, highquality=1)
if icon == "":
icon = None
# Inspection applications.
apps = g.inspect_list_applications(root)
# Force the libguestfs handle to close right now.
del g
# Log what we found.
logging.debug("%s: detected operating system: %s %s %d.%d (%s)",
prettyvm, typ, distro, major_version, minor_version,
product_name)
logging.debug("hostname: %s", hostname)
if icon:
logging.debug("icon: %d bytes", len(icon))
if apps:
logging.debug("# apps: %d", len(apps))
data = vmmInspectionData()
data.type = str(typ)
data.distro = str(distro)
data.major_version = int(major_version)
data.minor_version = int(minor_version)
data.hostname = str(hostname)
data.product_name = str(product_name)
data.product_variant = str(product_variant)
data.icon = icon
data.applications = list(apps)
self._set_vm_inspection_data(vm, data)
def _set_vm_inspection_data(self, vm, data):
vm.inspection = data
vm.inspection_data_updated()
self._cached_data[vm.get_uuid()] = data
|
gpl-2.0
| 184,982,375,710,807,000 | 33.157303 | 79 | 0.539035 | false | 4.119241 | false | false | false |
chunshen1987/superMC
|
scripts/generateEbeprofiles.py
|
1
|
11849
|
#! /usr/bin/env python
import sys, shutil
from numpy import *
from os import path, makedirs
import subprocess
import re
from glob import glob
class color:
"""
define colors in the terminal
"""
purple = '\033[95m'
cyan = '\033[96m'
darkcyan = '\033[36m'
blue = '\033[94m'
green = '\033[92m'
yellow = '\033[93m'
red = '\033[91m'
bold = '\033[1m'
underline = '\033[4m'
end = '\033[0m'
#dictionary for parameter list in superMC
superMCParameters = {
'which_mc_model' : 5,
'sub_model' : 1,
'Npmin' : 2,
'Npmax' : 1000,
'bmin' : 0,
'bmax' : 20,
'cutdSdy' : 1,
'cutdSdy_lowerBound' : 551.864,
'cutdSdy_upperBound' : 1000000.0,
'Aproj' : 197,
'Atarg' : 197,
'ecm' : 200,
'finalFactor' : 1.0,
'use_ed' : 0,
'use_sd' : 1,
'alpha' : 0.14,
'lambda' : 0.138,
'operation' : 2,
'cc_fluctuation_model' : 6,
'output_TATB' : 0,
'output_rho_binary' : 0,
'output_TA' : 1,
'output_rhob' : 0,
'output_spectator_density' : 1,
'generate_reaction_plane_avg_profile' : 0,
'nev' : 1000,
'average_from_order' : 2,
'average_to_order' : 2,
}
nucleus_name_dict = {
208: 'Pb',
197: 'Au',
238: 'U',
63: 'Cu',
1: 'p',
2: 'd',
3: 'He3',
}
nucleus_number_dict = {
'Pb': 208,
'Au': 197,
'U': 238,
'Cu': 63,
'p': 1,
'd': 2,
'He3': 3,
}
def form_assignment_string_from_dict(adict):
"""
Generate a parameter-equals-value string from the given dictionary. The
generated string has a leading blank.
"""
result = ""
for aparameter in adict.keys():
result += " {}={}".format(aparameter, adict[aparameter])
return result
def translate_centrality_cut(centrality_bound, cut_type='total_entropy'):
"""
translate the centrality boundaries to Npart, dS/dy, b values and update
the parameter lists for simulations
"""
centrality_lower_bound = centrality_bound[0]
centrality_upper_bound = centrality_bound[1]
if superMCParameters['which_mc_model'] == 5:
model_name = 'MCGlb'
elif superMCParameters['which_mc_model'] == 1:
model_name = 'MCKLN'
if superMCParameters['cc_fluctuation_model'] != 0:
multiplicity_fluctuation = 'withMultFluct'
else:
multiplicity_fluctuation = 'noMultFluct'
collision_energy = '%g' % superMCParameters['ecm']
Aproj = superMCParameters['Aproj']
Atrag = superMCParameters['Atarg']
if Aproj == Atrag: #symmetric collision
nucleus_name = nucleus_name_dict[Aproj]+nucleus_name_dict[Atrag]
else: # asymmetric collision
nucleus_name = (nucleus_name_dict[min(Aproj, Atrag)]
+ nucleus_name_dict[max(Aproj, Atrag)])
centrality_cut_file_name = (
'iebe_centralityCut_%s_%s_sigmaNN_gauss_d0.9_%s.dat'
% (cut_type, model_name + nucleus_name + collision_energy,
multiplicity_fluctuation)
)
try:
centrality_cut_file = loadtxt(
path.join(path.abspath('./centrality_cut_tables'),
centrality_cut_file_name))
except IOError:
print("Can not find the centrality cut table for the collision system")
print(centrality_cut_file_name)
exit(1)
lower_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_lower_bound+1e-30))
upper_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_upper_bound))
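# Linearly interpolate the cut observable (dS/dy or Npart) at each requested
# centrality bound from the two bracketing rows of the lookup table.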
cut_value_upper = (
(centrality_cut_file[lower_idx-1, 1]
- centrality_cut_file[lower_idx, 1])
/(centrality_cut_file[lower_idx-1, 0]
- centrality_cut_file[lower_idx, 0])
*(centrality_lower_bound - centrality_cut_file[lower_idx-1, 0])
+ centrality_cut_file[lower_idx-1, 1]
)
cut_value_low = (
(centrality_cut_file[upper_idx-1, 1]
- centrality_cut_file[upper_idx, 1])
/(centrality_cut_file[upper_idx-1, 0]
- centrality_cut_file[upper_idx, 0])
*(centrality_upper_bound - centrality_cut_file[upper_idx-1, 0])
+ centrality_cut_file[upper_idx-1, 1]
)
if cut_type == 'total_entropy':
superMCParameters['cutdSdy'] = 1
npart_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
npart_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 4])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 5])
superMCParameters['cutdSdy_lowerBound'] = cut_value_low
superMCParameters['cutdSdy_upperBound'] = cut_value_upper
elif cut_type == 'Npart':
superMCParameters['cutdSdy'] = 0
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
npart_min = cut_value_low
npart_max = cut_value_upper
superMCParameters['Npmax'] = npart_max
superMCParameters['Npmin'] = npart_min
superMCParameters['bmax'] = b_max
superMCParameters['bmin'] = b_min
#print out information
print('-'*80)
print('%s collisions at sqrt{s} = %s A GeV with %s initial conditions'
% (nucleus_name , collision_energy, model_name))
print("Centrality : %g - %g"
% (centrality_lower_bound, centrality_upper_bound) + r"%")
print('centrality cut on ', cut_type)
if cut_type == 'total_entropy':
print('dS/dy :', cut_value_low, '-', cut_value_upper)
print("Npart: ", npart_min, '-', npart_max)
print("b: ", b_min, '-', b_max, ' fm')
print('-'*80)
return
def update_superMC_dict(model, ecm, collsys, nev):
"""
update the superMCParameters dictionary with users input settings
"""
superMCParameters['nev'] = nev
if model == 'MCGlb':
superMCParameters['which_mc_model'] = 5
superMCParameters['sub_model'] = 1
elif model == 'MCKLN':
superMCParameters['which_mc_model'] = 1
superMCParameters['sub_model'] = 7
superMCParameters['cc_fluctuation_model'] = 0
else:
print(sys.argv[0], ': invalid initial model type', model)
print_help_message()
sys.exit(1)
superMCParameters['ecm'] = ecm
if ecm == 2760:
if model == 'MCGlb':
superMCParameters['alpha'] = 0.118
elif model == 'MCKLN':
superMCParameters['lambda'] = 0.138
if ecm <= 200:
if model == 'MCGlb':
superMCParameters['alpha'] = 0.14
elif model == 'MCKLN':
superMCParameters['lambda'] = 0.218
superMCParameters['Aproj'] = nucleus_number_dict[collsys[0]]
superMCParameters['Atarg'] = nucleus_number_dict[collsys[1]]
# for checking
#for x in superMCParameters.keys():
# print x + ': ' + str(superMCParameters[x])
return
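
# Illustrative call (hypothetical values), a minimal sketch:
#   update_superMC_dict('MCGlb', 200, ['Au', 'Au'], 100)
# sets which_mc_model=5, sub_model=1, ecm=200, alpha=0.14 and
# Aproj = Atarg = nucleus_number_dict['Au'].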
def generateEbeprofiles(output_path, centrality_bounds,
cut_type='total_entropy'):
runRecord = open('./runRecord.dat', 'a')
errRecord = open('./errRecord.dat', 'a')
if not path.exists(output_path):
makedirs(output_path)
translate_centrality_cut(centrality_bounds, cut_type)
cen_string = '%g-%g' %(centrality_bounds[0], centrality_bounds[1])
option = form_assignment_string_from_dict(superMCParameters)
cmd = './superMC.e' + option
superMC_folder = path.abspath('./')
print(cmd)
runRecord.write(cmd)
p = subprocess.Popen(cmd, shell=True, stdout=runRecord,
stderr=errRecord, cwd=superMC_folder)
p.wait()
# save files
store_folder = output_path
filelist = glob(path.join(superMC_folder, "data", "*"))
for ifile, filepath in enumerate(filelist):
filename = filepath.split("/")[-1]
if "block" in filename:
newfilename = re.sub("event", "C{}_event".format(cen_string),
filename)
newfilename = re.sub("_block", "".format(cen_string), newfilename)
shutil.move(filepath, path.join(store_folder, newfilename))
shutil.move('./runRecord.dat', path.join(store_folder, 'runRecord.dat'))
shutil.move('./errRecord.dat', path.join(store_folder, 'errRecord.dat'))
def print_help_message():
print("Usage : ")
print(color.bold
+ "{} -ecm ecm ".format(sys.argv[0])
+ "-cen cen_bounds"
+ "[-model model -collision_system collsys -cut_type cut_type]"
+ color.end)
print("Usage of {} command line arguments: ".format(sys.argv[0]))
print(color.bold + "-cen" + color.end
+ " centrality bounds(%): "
+ color.purple + "20-30" + color.end)
print(color.bold + "-ecm" + color.end
+ " collision energy (GeV): "
+ color.purple + "7.7, 11.5, 19.6, 27, 39, 62.4, 200, 2760, 5500"
+ color.end)
print(color.bold + "-cut_type" + color.end
+ " centrality cut type: "
+ color.purple + color.bold + "total_entropy[default]" + color.end
+ color.purple + ", Npart" + color.end)
print(color.bold + "-model" + color.end + " initial condition model: "
+ color.purple + color.bold + " MCGlb[default]" + color.end
+ color.purple + ", MCKLN" + color.end)
print(color.bold + "-collision_system" + color.end
+ " type of collision system: "
+ color.purple + color.bold + " Pb+Pb[default]" + color.end
+ color.purple + ", Au+Au, Cu+Au, U+U, p+Pb, p+Au, d+Au, He3+Au"
+ color.end)
if __name__ == "__main__":
# set default values
model = 'MCGlb'
cut_type = 'total_entropy'
#cut_type = 'Npart'
collsys = 'Au+Au'.split('+')
output_path = path.abspath('./RESULTS/')
nev = 100
while len(sys.argv) > 1:
option = sys.argv[1]
del sys.argv[1]
if option == '-model':
model = str(sys.argv[1])
del sys.argv[1]
elif option == '-collision_system':
collsys = str(sys.argv[1]).split('+')
del sys.argv[1]
elif option == "-nev":
nev = int(sys.argv[1])
del sys.argv[1]
elif option == '-cut_type':
cut_type = str(sys.argv[1])
del sys.argv[1]
if cut_type not in ['total_entropy', 'Npart']:
print(sys.argv[0], ': invalid centrality cut type', cut_type)
print_help_message()
sys.exit(1)
elif option == '-cen':
centrality_bounds = [float(istr) for istr in str(sys.argv[1]).split('-')]
del sys.argv[1]
elif option == '-ecm':
ecm = float(sys.argv[1])
del sys.argv[1]
elif option == '-output':
            folder = str(sys.argv[1])  # the output folder name is a string, not a float
output_path = path.join(path.abspath('./'), folder)
del sys.argv[1]
elif option == '-h':
print_help_message()
sys.exit(0)
else:
print(sys.argv[0], ': invalid option ', option)
print_help_message()
sys.exit(1)
try:
update_superMC_dict(model, ecm, collsys, nev)
generateEbeprofiles(output_path, centrality_bounds, cut_type)
except NameError:
print_help_message()
sys.exit(1)
|
gpl-3.0
| 1,107,092,717,768,073,200 | 34.160237 | 85 | 0.545025 | false | 3.291389 | false | false | false |
sonusz/PhasorToolBox
|
examples/freq_meter.py
|
1
|
1820
|
#!/usr/bin/env python3
"""
This is an real-time frequency meter of two PMUs.
This code connects to two PMUs, plot the frequency of the past 300 time-stamps and update the plot in real-time.
"""
from phasortoolbox import PDC,Client
import matplotlib.pyplot as plt
import numpy as np
import gc
import logging
logging.basicConfig(level=logging.DEBUG)
class FreqMeter(object):
def __init__(self):
x = np.linspace(-10.0, 0.0, num=300, endpoint=False)
y = [60.0]*300
plt.ion()
self.fig = plt.figure()
self.ax1 = self.fig.add_subplot(211)
self.line1, = self.ax1.plot(x, y)
plt.title('PMU1 Frequency Plot')
plt.xlabel('Time (s)')
plt.ylabel('Freq (Hz)')
self.ax2 = self.fig.add_subplot(212)
self.line2, = self.ax2.plot(x, y)
plt.title('PMU2 Frequency Plot')
plt.xlabel('Time (s)')
plt.ylabel('Freq (Hz)')
plt.tight_layout()
def update_plot(self, synchrophasors):
y_data = [[],[]]
for synchrophasor in synchrophasors:
for i, msg in enumerate(synchrophasor):
y_data[i].append(msg.data.pmu_data[0].freq)
self.line1.set_ydata(y_data[0])
self.line2.set_ydata(y_data[1])
self.ax1.set_ylim(min(y_data[0]),max(y_data[0]))
self.ax2.set_ylim(min(y_data[1]),max(y_data[1]))
self.fig.canvas.draw()
self.fig.canvas.flush_events()
del(synchrophasors)
gc.collect()
if __name__ == '__main__':
pmu_client1 = Client(remote_ip='10.0.0.1', remote_port=4722, idcode=1, mode='TCP')
pmu_client2 = Client(remote_ip='10.0.0.2', remote_port=4722, idcode=2, mode='TCP')
fm = FreqMeter()
pdc = PDC(clients=[pmu_client1,pmu_client2],history=300)
pdc.callback = fm.update_plot
pdc.run()
|
mit
| 5,007,648,745,987,173,000 | 29.847458 | 112 | 0.606044 | false | 2.902711 | false | false | false |
Nexedi/neoppod
|
neo/master/handlers/client.py
|
1
|
6146
|
#
# Copyright (C) 2006-2019 Nexedi SA
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from neo.lib.handler import DelayEvent
from neo.lib.protocol import Packets, ProtocolError, MAX_TID, Errors
from ..app import monotonic_time
from . import MasterHandler
class ClientServiceHandler(MasterHandler):
""" Handler dedicated to client during service state """
def handlerSwitched(self, conn, new):
assert new
super(ClientServiceHandler, self).handlerSwitched(conn, new)
def _connectionLost(self, conn):
# cancel its transactions and forgot the node
app = self.app
node = app.nm.getByUUID(conn.getUUID())
assert node is not None, conn
for x in app.tm.clientLost(node):
app.notifyTransactionAborted(*x)
node.setUnknown()
app.broadcastNodesInformation([node])
def askBeginTransaction(self, conn, tid):
"""
A client request a TID, nothing is kept about it until the finish.
"""
app = self.app
# Delay new transaction as long as we are waiting for NotifyReady
# answers, otherwise we can't know if the client is expected to commit
# the transaction in full to all these storage nodes.
if app.storage_starting_set:
raise DelayEvent
node = app.nm.getByUUID(conn.getUUID())
tid = app.tm.begin(node, app.storage_readiness, tid)
conn.answer(Packets.AnswerBeginTransaction(tid))
def askNewOIDs(self, conn, num_oids):
conn.answer(Packets.AnswerNewOIDs(self.app.tm.getNextOIDList(num_oids)))
def getEventQueue(self):
# for askBeginTransaction & failedVote
return self.app.tm
def failedVote(self, conn, *args):
app = self.app
conn.answer((Errors.Ack if app.tm.vote(app, *args) else
Errors.IncompleteTransaction)())
def askFinishTransaction(self, conn, ttid, oid_list, checked_list):
app = self.app
tid, node_list = app.tm.prepare(
app,
ttid,
oid_list,
checked_list,
conn.getPeerId(),
)
if tid:
p = Packets.AskLockInformation(ttid, tid)
for node in node_list:
node.ask(p)
else:
conn.answer(Errors.IncompleteTransaction())
# It's simpler to abort automatically rather than asking the client
# to send a notification on tpc_abort, since it would have keep the
# transaction longer in list of transactions.
# This should happen so rarely that we don't try to minimize the
# number of abort notifications by looking the modified partitions.
self.abortTransaction(conn, ttid, app.getStorageReadySet())
def askFinalTID(self, conn, ttid):
tm = self.app.tm
if tm.getLastTID() < ttid:
# Invalid ttid, or aborted transaction.
tid = None
elif ttid in tm:
# Transaction is being finished.
# We'll answer when it is unlocked.
tm[ttid].registerForNotification(conn.getUUID())
return
else:
# Transaction committed ? Tell client to ask storages.
tid = MAX_TID
conn.answer(Packets.AnswerFinalTID(tid))
def askPack(self, conn, tid):
app = self.app
if app.packing is None:
storage_list = app.nm.getStorageList(only_identified=True)
app.packing = (conn, conn.getPeerId(),
{x.getUUID() for x in storage_list})
p = Packets.AskPack(tid)
for storage in storage_list:
storage.getConnection().ask(p)
else:
conn.answer(Packets.AnswerPack(False))
def abortTransaction(self, conn, tid, uuid_list):
# Consider a failure when the connection between the storage and the
# client breaks while the answer to the first write is sent back.
# In other words, the client can not know the exact set of nodes that
# know this transaction, and it sends us all nodes it considered for
# writing.
# We must also add those that are waiting for this transaction to be
# finished (returned by tm.abort), because they may have join the
# cluster after that the client started to abort.
app = self.app
involved = app.tm.abort(tid, conn.getUUID())
involved.update(uuid_list)
app.notifyTransactionAborted(tid, involved)
# like ClientServiceHandler but read-only & only for tid <= backup_tid
class ClientReadOnlyServiceHandler(ClientServiceHandler):
def _readOnly(self, conn, *args, **kw):
conn.answer(Errors.ReadOnlyAccess(
'read-only access because cluster is in backuping mode'))
askBeginTransaction = _readOnly
askNewOIDs = _readOnly
askFinishTransaction = _readOnly
askFinalTID = _readOnly
askPack = _readOnly
abortTransaction = _readOnly
# XXX LastIDs is not used by client at all, and it requires work to determine
# last_oid up to backup_tid, so just make it non-functional for client.
askLastIDs = _readOnly
# like in MasterHandler but returns backup_tid instead of last_tid
def askLastTransaction(self, conn):
assert self.app.backup_tid is not None # we are in BACKUPING mode
backup_tid = self.app.pt.getBackupTid(min)
conn.answer(Packets.AnswerLastTransaction(backup_tid))
|
gpl-2.0
| 2,060,158,760,011,242,200 | 39.701987 | 81 | 0.642857 | false | 4.158322 | false | false | false |
Code4SA/municipal-data
|
bin/materialised_views.py
|
1
|
12424
|
"""
A script to build a set of files of materialised views of the data presented
in municipality profiles on the Municipal Money website.
Municipality-specific profile data is stored in per-municipality files,
since producing it takes a lot of time and many queries against the API.
Storing municipality-specific data separately from comparisons to other
municipalities based on that data (e.g. medians, number of similar
municipalities within norm bounds) allows quick iteration on the latter
without recalculating muni-specifics from the API each time.
Keeping this data in files instead of a database means version control helps
show what changed as the code changed, and guards against unintended changes
to calculations. It also allows deploying template and data changes
synchronously, and avoids the data/code structure mismatch that could occur
if the data lived in a database and was not upgraded during deployment -
potentially leading to downtime.
By keeping this script separate from the Municipal Money website django app,
this data can be recalculated without a more complex environment setup.
"""
import sys
sys.path.append('.')
from collections import defaultdict
from itertools import groupby
from scorecard.profile_data import (
APIData,
MuniApiClient,
Demarcation,
get_indicators,
get_indicator_calculators,
)
import argparse
import json
API_URL = 'https://municipaldata.treasury.gov.za/api'
def main():
parser = argparse.ArgumentParser(
description='Tool to dump the materialised views of the municipal finance data used on the Municipal Money website.')
parser.add_argument(
'--api-url',
help='API URL to use. Default: ' + API_URL)
command_group = parser.add_mutually_exclusive_group(required=True)
command_group.add_argument(
'--profiles-from-api',
action='store_true',
help='Fetch profile data from API, generate and store profiles.')
command_group.add_argument(
'--calc-medians',
action='store_true',
help='Calculate medians from stored profiles and store.')
command_group.add_argument(
'--calc-rating-counts',
action='store_true',
help='Calculate the number of items with each rating from stored profiles and store.')
parser.add_argument(
'--print-sets',
action='store_true',
help='Print the distribution sets')
parser.add_argument(
'--skip',
nargs='?',
default=0,
help='The number of municipalities to skip')
args = parser.parse_args()
if args.api_url:
api_url = args.api_url
else:
api_url = API_URL
if args.profiles_from_api:
generate_profiles(args, api_url)
elif args.calc_medians:
calculate_medians(args, api_url)
elif args.calc_rating_counts:
calculate_rating_counts(args, api_url)
def generate_profiles(args, api_url):
api_client = MuniApiClient(api_url)
munis = get_munis(api_client)
for muni in munis[int(args.skip):]:
demarcation_code = muni.get('municipality.demarcation_code')
api_data = APIData(api_client.API_URL, demarcation_code, client=api_client)
api_data.fetch_data()
indicators = get_indicators(api_data)
profile = {
'mayoral_staff': api_data.mayoral_staff(),
'muni_contact': api_data.muni_contact(),
'audit_opinions': api_data.audit_opinions(),
'indicators': indicators,
'demarcation': Demarcation(api_data).as_dict(),
}
filename = "scorecard/materialised/profiles/%s.json" % demarcation_code
with open(filename, 'w', encoding="utf8") as f:
json.dump(profile, f, sort_keys=True, indent=4, separators=(',', ': '))
def calculate_medians(args, api_url):
api_client = MuniApiClient(api_url)
munis = get_munis(api_client)
for muni in munis:
demarcation_code = muni.get('municipality.demarcation_code')
filename = "scorecard/materialised/profiles/%s.json" % demarcation_code
with open(filename, 'r') as f:
profile = json.load(f)
indicators = profile['indicators']
muni.update(indicators)
nat_sets, nat_medians = calc_national_medians(munis)
prov_sets, prov_medians = calc_provincial_medians(munis)
if args.print_sets:
print("Indicator value sets by MIIF category nationally")
print(json.dumps(nat_sets, sort_keys=True, indent=4, separators=(',', ': ')))
        print()
print("Indicator value sets by MIIF category and province")
print(json.dumps(prov_sets, sort_keys=True, indent=4, separators=(',', ': ')))
# write medians
filename = "scorecard/materialised/indicators/distribution/median.json"
medians = {
'provincial': prov_medians,
'national': nat_medians,
}
with open(filename, 'w', encoding="utf8") as f:
json.dump(medians, f, sort_keys=True, indent=4, separators=(',', ': '))
def calc_national_medians(munis):
nat_sets = get_national_miif_sets(munis)
nat_medians = defaultdict(lambda: defaultdict(dict))
# calculate national median per MIIF category and year for each indicator
for name in nat_sets.keys():
for dev_cat in nat_sets[name].keys():
for year in nat_sets[name][dev_cat].keys():
results = [period['result'] for period in nat_sets[name][dev_cat][year]]
nat_medians[name][dev_cat][year] = median(results)
return nat_sets, nat_medians
def calc_provincial_medians(munis):
prov_sets = get_provincial_miif_sets(munis)
prov_medians = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
# calculate provincial median per province, MIIF category and year for each indicator
for name in prov_sets.keys():
for prov_code in prov_sets[name].keys():
for dev_cat in prov_sets[name][prov_code].keys():
for year in prov_sets[name][prov_code][dev_cat].keys():
results = [period['result'] for period in prov_sets[name][prov_code][dev_cat][year]]
prov_medians[name][prov_code][dev_cat][year] = median(results)
return prov_sets, prov_medians
def median(items):
sorted_items = sorted(items)
count = len(sorted_items)
if count % 2 == 1:
# middle item of odd set is floor of half of count
return sorted_items[count//2]
else:
# middle item of even set is mean of middle two items
return (sorted_items[(count-1)//2] + sorted_items[(count+1)//2])/2.0
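
# Worked examples (doctest-style sketch):
#   median([3, 1, 2])    -> 2    (odd count: the middle item)
#   median([1, 2, 3, 4]) -> 2.5  (even count: mean of the two middle items)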
def calculate_rating_counts(args, api_url):
api_client = MuniApiClient(api_url)
munis = get_munis(api_client)
for muni in munis:
demarcation_code = muni.get('municipality.demarcation_code')
filename = "scorecard/materialised/profiles/%s.json" % demarcation_code
with open(filename, 'r') as f:
profile = json.load(f)
indicators = profile['indicators']
muni.update(indicators)
nat_sets, nat_rating_counts = calc_national_rating_counts(munis)
prov_sets, prov_rating_counts = calc_provincial_rating_counts(munis)
if args.print_sets:
print("Indicator value sets by MIIF category nationally")
print(json.dumps(nat_sets, sort_keys=True, indent=4, separators=(',', ': ')))
        print()
print("Indicator value sets by MIIF category and province")
print(json.dumps(prov_sets, sort_keys=True, indent=4, separators=(',', ': ')))
# write rating counts
filename = "scorecard/materialised/indicators/distribution/rating_counts.json"
rating_counts = {
'provincial': prov_rating_counts,
'national': nat_rating_counts,
}
with open(filename, 'w', encoding="utf8") as f:
json.dump(rating_counts, f, sort_keys=True, indent=4, separators=(',', ': '))
def calc_national_rating_counts(munis):
"""
Calculate the number of munis with each norm rating per MIIF category
and year for each indicator
"""
nat_sets = get_national_miif_sets(munis)
nat_rating_counts = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
rating_key = lambda period: period['rating']
for name in nat_sets.keys():
for dev_cat in nat_sets[name].keys():
for year in nat_sets[name][dev_cat].keys():
rating_sorted = sorted(nat_sets[name][dev_cat][year], key=rating_key)
for rating, rating_group in groupby(rating_sorted, rating_key):
nat_rating_counts[name][dev_cat][year][rating] = len(list(rating_group))
return nat_sets, nat_rating_counts
def calc_provincial_rating_counts(munis):
"""
Calculate the number of munis with each norm rating per province,
MIIF category and year for each indicator
"""
prov_sets = get_provincial_miif_sets(munis)
prov_rating_counts = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(dict))))
rating_key = lambda period: period['rating']
for name in prov_sets.keys():
for prov_code in prov_sets[name].keys():
for dev_cat in prov_sets[name][prov_code].keys():
for year in prov_sets[name][prov_code][dev_cat].keys():
rating_sorted = sorted(prov_sets[name][prov_code][dev_cat][year], key=rating_key)
for rating, rating_group in groupby(rating_sorted, rating_key):
prov_rating_counts[name][prov_code][dev_cat][year][rating] = len(list(rating_group))
return prov_sets, prov_rating_counts
def get_national_miif_sets(munis):
"""
collect set of indicator values for each MIIF category and year
returns dict of the form {
'cash_coverage': {
'B1': {
'2015': [{'result': ...}]
}
}
}
"""
nat_sets = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
dev_cat_key = lambda muni: muni['municipality.miif_category']
dev_cat_sorted = sorted(munis, key=dev_cat_key)
for calculator in get_indicator_calculators(has_comparisons=True):
name = calculator.indicator_name
for dev_cat, dev_cat_group in groupby(dev_cat_sorted, dev_cat_key):
for muni in dev_cat_group:
for period in muni[name]['values']:
if period['result'] is not None:
nat_sets[name][dev_cat][period['date']].append(period)
return nat_sets
def get_provincial_miif_sets(munis):
"""
collect set of indicator values for each province, MIIF category and year
returns dict of the form {
'cash_coverage': {
'FS': {
'B1': {
'2015': [{'result': ...}]
}
}
}
}
"""
prov_sets = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(list))))
dev_cat_key = lambda muni: muni['municipality.miif_category']
dev_cat_sorted = sorted(munis, key=dev_cat_key)
prov_key = lambda muni: muni['municipality.province_code']
for calculator in get_indicator_calculators(has_comparisons=True):
name = calculator.indicator_name
for dev_cat, dev_cat_group in groupby(dev_cat_sorted, dev_cat_key):
prov_sorted = sorted(dev_cat_group, key=prov_key)
for prov_code, prov_group in groupby(prov_sorted, prov_key):
for muni in prov_group:
for period in muni[name]['values']:
if period['result'] is not None:
prov_sets[name][prov_code][dev_cat][period['date']].append(period)
return prov_sets
def get_munis(api_client):
query = api_client.api_get({'query_type': 'facts',
'cube': 'municipalities',
'fields': [
'municipality.demarcation_code',
'municipality.name',
'municipality.miif_category',
'municipality.province_code',
],
'value_label': '',
})
result = query.result()
result.raise_for_status()
body = result.json()
if body.get("total_cell_count") == body.get("page_size"):
raise Exception("should page municipalities")
return body.get("data")
if __name__ == "__main__":
main()
|
mit
| 7,838,789,445,039,154,000 | 38.44127 | 125 | 0.632325 | false | 3.681185 | false | false | false |
jezdez/hirefire
|
hirefire/procs/hotqueue.py
|
1
|
1845
|
from __future__ import absolute_import
from hotqueue import HotQueue
from . import ClientProc
class HotQueueProc(ClientProc):
"""
A proc class for the `HotQueue
<http://richardhenry.github.com/hotqueue/>`_ library.
:param name: the name of the proc (required)
:param queues: list of queue names to check (required)
:param connection_params: the connection parameter to use by default
(optional)
:type name: str
:type queues: str or list
:type connection_params: dict
Example::
from hirefire.procs.hotqueue import HotQueueProc
class WorkerHotQueueProc(HotQueueProc):
name = 'worker'
queues = ['myqueue']
connection_params = {
'host': 'localhost',
'port': 6379,
'db': 0,
}
"""
#: The name of the proc (required).
name = None
#: The list of queues to check (required).
queues = []
#: The connection parameter to use by default (optional).
connection_params = {}
def __init__(self, connection_params=None, *args, **kwargs):
super(HotQueueProc, self).__init__(*args, **kwargs)
if connection_params is not None:
self.connection_params = connection_params
def client(self, queue):
"""
Given one of the configured queues returns a
:class:`hotqueue.HotQueue` instance with the
:attr:`~hirefire.procs.hotqueue.HotQueueProc.connection_params`.
"""
if isinstance(queue, HotQueue):
return queue
return HotQueue(queue, **self.connection_params)
def quantity(self, **kwargs):
"""
Returns the aggregated number of tasks of the proc queues.
"""
return sum([len(client) for client in self.clients])
|
bsd-3-clause
| -1,827,444,227,618,777,600 | 28.285714 | 72 | 0.598374 | false | 4.300699 | false | false | false |
kootenpv/brightml
|
brightml/timer.py
|
1
|
1228
|
import time
class ZmqTimerManager(object):
def __init__(self):
self.timers = []
self.next_call = 0
def add_timer(self, timer):
self.timers.append(timer)
def check(self):
if time.time() > self.next_call:
for timer in self.timers:
timer.check()
def get_next_interval(self):
if time.time() >= self.next_call:
call_times = []
for timer in self.timers:
call_times.append(timer.get_next_call())
self.next_call = min(call_times)
if self.next_call < time.time():
val = 1
else:
val = (self.next_call - time.time()) * 1000
else:
val = (self.next_call - time.time()) * 1000
if val < 1:
val = 1
return val
class ZmqTimer(object):
def __init__(self, interval, callback):
self.interval = interval
self.callback = callback
self.last_call = 0
def check(self):
if time.time() > (self.interval + self.last_call):
self.callback()
self.last_call = time.time()
def get_next_call(self):
return self.last_call + self.interval
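

# A minimal usage sketch (assumed code, not part of the original module): the
# manager tells a polling loop how many milliseconds it may sleep before the
# next timer is due, e.g. as the timeout argument of a zmq poller.poll() call.
if __name__ == '__main__':
    def tick():
        print('tick')

    manager = ZmqTimerManager()
    manager.add_timer(ZmqTimer(5, tick))      # fire roughly every 5 seconds
    manager.check()                           # run any callbacks that are due
    timeout_ms = manager.get_next_interval()  # ms a poller may sleep for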
|
mit
| 6,674,582,009,593,183,000 | 25.695652 | 59 | 0.517101 | false | 3.861635 | false | false | false |
socialsensor/community-evolution-analysis
|
matlab/python_data_parsing/json_mention_multifile_noDialog_crawler.py
|
1
|
2244
|
#-------------------------------------------------------------------------------
# Purpose: parsing data from the crawler's "rawmetadata.json.#" json files to a form:
# author mentioned1,mentioned2,... unixTimestamp + text \n
# creating as many txt files as there are json files.
# This .py file does not present the user with a folder selection dialog.
# Required libs: unidecode
# Author: konkonst
#
# Created: 31/05/2013
# Copyright: (c) ITI (CERTH) 2013
# Licence: <apache licence 2.0>
#-------------------------------------------------------------------------------
import json
import os, glob
import codecs, unicodedata
from unidecode import unidecode
# User selects dataset folder
dataset_path = "E:/konkonst/retriever/crawler_temp/"
# Parsing commences
counter=0
for filename in sorted(glob.glob(dataset_path+"/rawmetadata.json.*"),reverse=True):#json files
print(filename)
my_file=open(filename,"r")
counter+=1
my_txt=open(dataset_path+"/auth_ment_time_txt_"+str(counter)+".txt","w")#target files
read_line=my_file.readline()
ustr_to_load = unicode(read_line, 'iso-8859-15')
while read_line:
json_line=json.loads(ustr_to_load)##,encoding="cp1252")#.decode("utf-8","replace")
if "delete" in json_line or "scrub_geo" in json_line or "limit" in json_line:
read_line=my_file.readline()
ustr_to_load = unicode(read_line, 'iso-8859-15')
continue
else:
if json_line["entities"]["user_mentions"] and json_line["user"]["screen_name"]:
len_ment=len(json_line["entities"]["user_mentions"])
mentions=[]
for i in range(len_ment):
mentions.append(json_line["entities"]["user_mentions"][i]["screen_name"])
my_text=json_line["text"].replace("\n", "")
my_text=unidecode(my_text)
my_txt.write(json_line["user"]["screen_name"]+"\t" + ",".join(mentions)+"\t"+"\""+json_line["created_at"]+"\""+"\t"+my_text+"\n")
read_line=my_file.readline()
ustr_to_load = unicode(read_line, 'iso-8859-15')
else:
my_file.close()
my_txt.close()
|
apache-2.0
| 3,282,412,746,953,365,500 | 43.88 | 145 | 0.559269 | false | 3.561905 | false | false | false |
tyrylu/pyfmodex
|
pyfmodex/studio/event_description.py
|
1
|
1936
|
"""The description for an FMOD Studio Event."""
from ctypes import byref, c_int, c_void_p, create_string_buffer
from .event_instance import EventInstance
from .studio_object import StudioObject
from .enums import LOADING_STATE
class EventDescription(StudioObject):
"""The description for an FMOD Studio Event.
Event descriptions belong to banks and can be queried after the relevant
bank has been loaded. Event descriptions may be retrieved via path or GUID
lookup, or by enumerating all descriptions in a bank.
"""
function_prefix = "FMOD_Studio_EventDescription"
@property
def path(self):
"""The path."""
required = c_int()
self._call("GetPath", None, 0, byref(required))
path_buffer = create_string_buffer(required.value)
self._call("GetPath", path_buffer, len(path_buffer), None)
return path_buffer.value.decode("utf-8")
def create_instance(self):
"""Create a playable instance."""
instance_ptr = c_void_p()
self._call("CreateInstance", byref(instance_ptr))
return EventInstance(instance_ptr)
@property
def parameter_description_count(self):
"""The number of parameters in the event."""
count = c_int()
self._call("GetParameterDescriptionCount", byref(count))
return count.value
@property
def user_property_count(self):
"""The number of user properties attached to the event."""
count = c_int()
self._call("GetUserPropertyCount", byref(count))
return count.value
def load_sample_data(self):
"""Loads non-streaming sample data used by the event."""
self._call("LoadSampleData")
@property
def sample_loading_state(self):
"""Retrieves the sample data loading state."""
state = c_int()
self._call("GetSampleLoadingState", byref(state))
return LOADING_STATE(state.value)
|
mit
| -1,565,802,748,173,493,800 | 32.982456 | 78 | 0.657541 | false | 4.190476 | false | false | false |
kronicz/ecommerce-2
|
bin/player.py
|
1
|
2211
|
#!/Users/michelkrohn-dale/Desktop/ecommerce-2/bin/python
#
# The Python Imaging Library
# $Id$
#
from __future__ import print_function
try:
from tkinter import *
except ImportError:
from Tkinter import *
from PIL import Image, ImageTk
import sys
# --------------------------------------------------------------------
# an image animation player
class UI(Label):
def __init__(self, master, im):
if isinstance(im, list):
# list of images
self.im = im[1:]
im = self.im[0]
else:
# sequence
self.im = im
if im.mode == "1":
self.image = ImageTk.BitmapImage(im, foreground="white")
else:
self.image = ImageTk.PhotoImage(im)
Label.__init__(self, master, image=self.image, bg="black", bd=0)
self.update()
try:
duration = im.info["duration"]
except KeyError:
duration = 100
self.after(duration, self.next)
def next(self):
if isinstance(self.im, list):
try:
im = self.im[0]
del self.im[0]
self.image.paste(im)
except IndexError:
return # end of list
else:
try:
im = self.im
im.seek(im.tell() + 1)
self.image.paste(im)
except EOFError:
return # end of file
try:
duration = im.info["duration"]
except KeyError:
duration = 100
self.after(duration, self.next)
self.update_idletasks()
# --------------------------------------------------------------------
# script interface
if __name__ == "__main__":
if not sys.argv[1:]:
print("Syntax: python player.py imagefile(s)")
sys.exit(1)
filename = sys.argv[1]
root = Tk()
root.title(filename)
if len(sys.argv) > 2:
# list of images
print("loading...")
im = []
for filename in sys.argv[1:]:
im.append(Image.open(filename))
else:
# sequence
im = Image.open(filename)
UI(root, im).pack()
root.mainloop()
|
mit
| -4,403,060,269,132,047,400 | 20.676471 | 72 | 0.475351 | false | 4.13271 | false | false | false |
yaukwankiu/armor
|
dataStreamTools/cwbFileNames.py
|
1
|
2022
|
"""
to fix the file names for the data from cwb
201403111800f006_M15.txt -> WRF15.20140312.0000.txt
"""
#from armor import pattern
#import shutil
import os
import datetime, time
from armor import defaultParameters as dp
root = dp.defaultRootFolder
obsFolder = root + "data/march2014/QPESUMS/"
wrfsFolder1 = root + "data/march2014/WEPS/20140311/"
wrfsFolder2 = root + "data/march2014/WEPS/20140312/"
wrfsFolder3 = root + "data/march2014/WEPS/20140313/"
kongreywrf = root + "data/KONG-REY/WRFEPS/"
may19 = root + "data/may14/WRFEPS19[regridded]/"
may20 = root + "data/may14/WRFEPS20[regridded]/"
may21 = root + "data/may14/WRFEPS21[regridded]/"
may22 = root + "data/may14/WRFEPS22[regridded]/"
may23 = root + "data/may14/WRFEPS23[regridded]/"
folderList = [ may21, may22, may23] #<-- change here
#folderList=[may20]
count = 0
#for folder in [wrfsFolder1, wrfsFolder2, wrfsFolder3]:
for folder in folderList:
print "Folder", folder
os.makedirs(folder+"a/")
#time.sleep(2)
L = os.listdir(folder)
L = [v for v in L if v.endswith(".txt") and not v.startswith("WRF")]
L.sort()
for f1 in L:
count +=1
print count, f1,
path1 = folder + f1
year = int(f1[0:4])
month = int(f1[4:6])
day = int(f1[6:8])
hour = int(f1[8:10])
minute = int(f1[10:12])
hourDiff= int(f1[13:16])
modelNo = f1[18:20]
suffix = f1[20:]
T = datetime.datetime(year, month, day, hour, minute) + datetime.timedelta(hourDiff*1./24)
year2 = str(T.year)
month2 = ("0"+str(T.month))[-2:]
day2 = ("0"+str(T.day))[-2:]
hour2 = ("0"+str(T.hour))[-2:]
minute2 = ("0"+str(T.minute))[-2:]
f2 = "WRF" + modelNo + "." + year2 + month2 + day2 + "." + hour2 + minute2 + suffix
print "->", f2
try:
os.rename(folder+f1, folder+"a/"+f2)
except:
print f1, "not found!!"
|
cc0-1.0
| 550,889,382,923,040,960 | 33.271186 | 104 | 0.577151 | false | 2.743555 | false | false | false |
eSmelser/SnookR
|
SnookR/api/views.py
|
1
|
3749
|
import hashlib
from rest_framework.generics import ListCreateAPIView, UpdateAPIView, ListAPIView, RetrieveAPIView, CreateAPIView
from django.core.cache import caches
from rest_framework.response import Response
from substitutes.models import Session, SessionEvent, Sub
from accounts.models import CustomUser
from teams.models import Team, NonUserPlayer
from invites.models import SessionEventInvite, TeamInvite
from api.serializers import (
TeamInviteSerializer,
TeamInviteUpdateSerializer,
TeamSerializer,
CustomUserSerializer,
NonUserPlayerSerializer,
SessionSerializer,
SessionEventSerializer,
SubSerializer,
SessionEventInviteSerializer,
)
from api.permissions import TeamPermission, TeamInvitePermission
from api.filters import TeamFilter, TeamInviteFilter, UserFilter, SessionFilter, SessionEventFilter, SubFilter, \
SessionEventInviteFilter
class UserView(RetrieveAPIView):
serializer_class = CustomUserSerializer
queryset = CustomUser.objects.all()
def get_object(self):
return CustomUser.objects.get(username=self.request.user.username)
class UserSearchView(ListAPIView):
serializer_class = CustomUserSerializer
queryset = CustomUser.objects.all()
    def list(self, request, *args, **kwargs):
        # The original dropped into pdb here (debugging leftover); delegate to
        # the default DRF implementation so the endpoint returns a response.
        return super(UserSearchView, self).list(request, *args, **kwargs)
class UserListView(ListAPIView):
serializer_class = CustomUserSerializer
queryset = CustomUser.objects.all()
filter_class = UserFilter
filter_fields = ('username', 'id', 'first_name', 'last_name')
class TeamView(ListCreateAPIView):
queryset = Team.objects.all()
serializer_class = TeamSerializer
permission_classes = (TeamPermission,)
filter_class = TeamFilter
filter_fields = ('id', 'name')
class TeamInviteListView(ListCreateAPIView):
queryset = TeamInvite.objects.all()
serializer_class = TeamInviteSerializer
permission_classes = (TeamInvitePermission,)
filter_class = TeamInviteFilter
filter_fields = ('invitee', 'team', 'status', 'id')
class TeamInviteUpdateView(UpdateAPIView):
queryset = TeamInvite.objects.all()
serializer_class = TeamInviteUpdateSerializer
class NonUserPlayerListCreateView(ListCreateAPIView):
queryset = NonUserPlayer.objects.all()
serializer_class = NonUserPlayerSerializer
class SessionListView(ListAPIView):
serializer_class = SessionSerializer
queryset = Session.objects.all()
filter_class = SessionFilter
filter_fields = tuple(['division'] + list(SessionFilter.Meta.fields.keys()))
class SubListView(ListCreateAPIView):
serializer_class = SubSerializer
queryset = Sub.objects.all()
filter_class = SubFilter
class SessionEventListView(ListAPIView):
queryset = SessionEvent.objects.all()
serializer_class = SessionEventSerializer
filter_class = SessionEventFilter
class SearchUserView(ListAPIView):
def list(self, request, *args, **kwargs):
cache = caches['default']
query = self.request.GET.get('query', '')
key = 'search_user_view:%s' % hashlib.md5(query.encode('ascii', 'ignore')).hexdigest()
objs = cache.get(key)
if objs is None:
objs = CustomUser.objects.search(query)
cache.set(key, objs, 60 * 3)
serializer = CustomUserSerializer(objs, many=True)
return Response(serializer.data)
class SessionEventInviteListView(ListCreateAPIView):
queryset = SessionEventInvite.objects.all()
serializer_class = SessionEventInviteSerializer
filter_class = SessionEventInviteFilter
class SessionEventInviteView(RetrieveAPIView):
queryset = SessionEventInvite.objects.all()
serializer_class = SessionEventInviteSerializer
filter_class = SessionEventInviteFilter
|
mit
| 78,620,710,748,288,240 | 30.504202 | 113 | 0.746599 | false | 4.179487 | false | false | false |
alvare/python-funcional
|
parser-monad.py
|
1
|
1535
|
import sys
from functools import reduce  # needed on Python 3, where reduce is no longer a builtin
from pymonad.Reader import curry
from Parser import *
def item():
return Parser(lambda cs: [] if cs == "" else [(cs[0], cs[1:])])
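
# Behaviour sketch: item consumes exactly one character, e.g.
#   item().parse('abc') -> [('a', 'bc')]
#   item().parse('')    -> []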
def sat(cond):
return item() >> (lambda c: unit(Parser, c) if cond(c) else mzero(Parser))
def char(c):
return sat(lambda x: c == x)
def space():
return char(' ')
def oneOf(chars):
return sat(lambda x: x in chars)
def many(p):
return many1(p) | unit(Parser, [])
def many1(p):
return p >> (lambda x:
many(p) >> (lambda xs:
unit(Parser, [x] + xs)))
def sepby(p, sep):
return sepby1(p, sep) | unit(Parser, [])
def sepby1(p, sep):
return p >> (lambda x:
many(sep >> p) >> (lambda xs:
unit(Parser, [x] + xs)))
def string(s):
if s:
return char(s[0]) >>\
string(s[1:]) >>\
unit(Parser, s)
else:
return unit(Parser, '')
def chainl(p, op, a):
return chainl1(p, op) | unit(Parser, a)
def chainl1(p, op):
def rest(a):
return (op >> (lambda f: p >> (lambda b: rest(f(a, b))))) | unit(Parser, a)
return p >> rest
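
# Sketch (assuming the combinators above): chainl1 builds left-associative
# expression parsers, e.g. sums of single digits:
#   digit = oneOf(num) >> (lambda d: unit(Parser, int(d)))
#   plus = char('+') >> unit(Parser, lambda a, b: a + b)
#   chainl1(digit, plus).parse('1+2+3') -> [(6, '')]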
# examples
alpha = 'abcdefghijklmnopqrstuvwxyz'
num = '1234567890'
def word():
return many(oneOf(alpha)) >> (lambda w:
unit(Parser, reduce(lambda x, y: x + y, w, '')))
def array_parser():
return string('array') >>\
space() >>\
char('[') >>\
sepby(word(), char(',')) >> (lambda e:
char(']') >>
unit(Parser, e))
print(array_parser().parse(sys.argv[1]))
|
mit
| 4,481,513,220,722,752,500 | 21.573529 | 83 | 0.522476 | false | 3.05169 | false | false | false |
Commonists/bingo
|
bingo/card_maker.py
|
1
|
1930
|
# -*- coding: utf-8 -*-
import os
from string import Template, ascii_uppercase
import itertools
class CardMaker(object):
def __init__(self, size):
self.size = size
self.elements = []
def make_card(self):
        raise NotImplementedError()
class MarkdownCardMaker(CardMaker):
    def make_card(self):
        res = []
        for row_index in range(self.size):
            # take consecutive, non-overlapping chunks of `size` elements,
            # one chunk per row (the original slices overlapped)
            row_items = self.elements[row_index * self.size:
                                      (row_index + 1) * self.size]
            res.append(self.make_row(row_items))
        return '\n'.join(res)
def make_row(self, elements):
return "|".join(elements)
class LaTeXCardMaker(CardMaker):
def read_template(self):
path = os.path.dirname(os.path.realpath(__file__))
template_file = os.path.join(path, 'tex', 'template.tex')
return open(template_file, 'r').read()
def make_card(self):
contents = self.make_card_contents()
return contents
def make_card_contents(self):
template = Template(self.read_template())
node_definitions = self.make_node_definitions()
values = {
'title': "Wikimania 2015 bingo",
'size': self.size,
'sequence': '1/A, 2/B, 3/C, 4/D, 5/E',
'node_definitions': "\n".join(node_definitions)
}
return template.safe_substitute(values)
def get_node_list(self):
alphabet = ascii_uppercase
letters = alphabet[0:self.size]
cartesian_product = itertools.product(letters, letters)
node_list = ['%s%s' % (x, y) for (x, y) in cartesian_product]
node_list.remove('CC')
return node_list
def make_node_definitions(self):
nodes = self.get_node_list()
return [self.make_element(x, y) for (x, y) in zip(nodes, self.elements)]
def make_element(self, index, contents):
element = r"\newcommand{\Node%s}{%s}" % (index, contents)
return element
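
# Example of the emitted LaTeX (sketch): make_element('AA', 'free wifi')
# produces the line
#   \newcommand{\NodeAA}{free wifi}
# which the template can then place in the grid cell for node AA.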
|
mit
| 8,819,562,063,728,641,000 | 27.80597 | 80 | 0.592746 | false | 3.600746 | false | false | false |
mtivadar/qiew
|
TextDecorators.py
|
1
|
13791
|
from PyQt5 import QtGui, QtCore, QtWidgets
import re
import string
from time import time
import sys
import TextSelection
class CTextDecorator(object):
redPen = QtGui.QPen(QtGui.QColor(255, 0, 0))
greenPen = QtGui.QPen(QtGui.QColor(255, 255, 0))
whitePen = QtGui.QPen(QtGui.QColor(255, 255, 255))
normalPen = QtGui.QPen(QtGui.QColor(192, 192, 192), 1, QtCore.Qt.SolidLine)
MZbrush = QtGui.QBrush(QtGui.QColor(128, 0, 0))
grayBrush = QtGui.QBrush(QtGui.QColor(128, 128, 128))
def __init__(self):
pass
class TextDecorator(CTextDecorator):
def __init__(self, viewmode):
self.operations = []
self.dataModel = viewmode.getDataModel()
self.viewmode = viewmode
self.penMap = {}
self.brushMap = {}
self.PenInterval = []
self.normalPen = QtGui.QPen(QtGui.QColor(192, 192, 192), 1, QtCore.Qt.SolidLine)
# if we want to generate T/F table
self.Special = string.ascii_letters + string.digits + ' .;\':;=\"?-!()/\\_'
self.Special = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, True, True, True, False, False, False, False, True, True,
True, False, False, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, False, True, False, True,
False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True,
True, True, True, False, True, False, False, True, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True,
True, True, True, True, True, True, True, True, True, True, True, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
def reset(self):
self.penMap = {}
self.brushMap = {}
self.PenInterval = []
def getDataModel(self):
return self.dataModel
def isText(self, c):
"""
D = []
for i in range(256):
b = False
if self.isText(chr(i)):
b = True
D.append(b)
print D
sys.exit()
"""
return self.Special[ord(c)]
def getChar(self, idx):
#self.page = self.getDataModel().getDisplayablePage()
if idx < len(self.page):
return self.page[idx]
return 0
def decorate(self, pageOffset=None):
if pageOffset:
self.page = self.viewmode.getDisplayablePage(pageOffset=pageOffset)
else:
self.page = self.viewmode.getDisplayablePage()
return self.page
    def addPenInterval(self, a, b, ignoreHighlights, pen):
        # stored as (a, b, ignoreHighlights, pen) so the tuple order matches
        # both the unpacking in choosePen() and the call in RangePen below
        self.PenInterval.append((a, b, ignoreHighlights, pen))
def choosePen(self, idx):
key = self.dataModel.getOffset() + idx
        # If a pen interval covers this offset, return its pen when the
        # interval ignores highlights; otherwise a non-default per-byte pen
        # from penMap (set by other transformations) takes priority over the
        # interval pen.
for a, b, ignoreHighlights, pen in self.PenInterval:
# in interval
if a <= key <= b:
if ignoreHighlights:
return pen
if key in self.penMap:
if self.penMap[key] == self.normalPen:
return pen
else:
return self.penMap[key]
else:
return pen
if key in self.penMap:
return self.penMap[key]
return self.normalPen
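    # Worked example (sketch): with an interval (100, 200, False, bluePen)
    # and penMap[150] = redPen, choosePen returns redPen at offset 150 (the
    # highlight wins) but bluePen at offset 160, where only the interval
    # applies; anything outside both falls back to normalPen.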
def chooseBrush(self, idx):
off = self.dataModel.getOffset() + idx
if off in self.brushMap:
return self.brushMap[off]
return None
class PageDecorator(TextDecorator):
def __init__(self, decorated):
pass
def reset(self):
self.decorated.reset()
self.penMap = {}
self.brushMap = {}
self.PenInterval = []
def getBrushMap(self):
return self.brushMap
def getPenMap(self):
return self.penMap
def doit(self):
pass
def getDataModel(self):
return self.dataModel
class HighlightASCII(PageDecorator):
def __init__(self, decorated):
self.dataModel = decorated.getDataModel()
self.penMap = decorated.penMap
self.decorated = decorated
super(HighlightASCII, self).__init__(decorated)
self.dataModel = super(HighlightASCII, self).getDataModel()
def decorate(self, pageOffset=None):
page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
off = self.dataModel.getOffset()
Match = [(m.start(), m.end()) for m in re.finditer(rb'([a-zA-Z0-9\-\\.%*:/? _<>]){4,}', page)]
for s, e in Match:
for i in range(e-s):
idx = off + s + i
if idx not in self.penMap:
self.penMap[off + s + i] = self.redPen
self.page = page
return self.page
class HighlightPrefix(PageDecorator):
def __init__(self, decorated, text, additionalLength=0, brush=None, pen=None):
super(HighlightPrefix, self).__init__(decorated)
self.dataModel = decorated.getDataModel()
self.decorated = decorated
self.additionalLength = additionalLength
self.brush = brush
self.text = text
self.pen = pen
def decorate(self, pageOffset=None):
page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
self.page = self.highliteWithPrefix(page, self.text, self.additionalLength, self.brush, self.pen)
return self.page
def highliteWithPrefix(self, page, text, additionalLength=0, brush=None, pen=None):
        # todo: I haven't found a more elegant way to select all occurrences
        # of `text`; regexps didn't work here ("bad re expression")
lenText = len(text)
M = []
idx = 0
if lenText > 0:
while idx < len(page):
idx = page.find(text.encode('utf-8'), idx, len(page))
if idx == -1:
break
M.append((idx, lenText + additionalLength))
idx += lenText + additionalLength
off = self.dataModel.getOffset()
for start, length in M:
for i in range(length):
self.penMap[off + start + i] = pen
self.brushMap[off + start + i] = brush
return page
class HighlightWideChar(PageDecorator):
def __init__(self, decorated):
super(HighlightWideChar, self).__init__(decorated)
self.dataModel = decorated.getDataModel()
self.decorated = decorated
def decorate(self, pageOffset=None):
self.page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
self.page = self.highliteWidechar2(self.page)
return self.page
def highliteWidechar2(self, page):
pageStart = self.dataModel.getOffset()
pageEnd = pageStart + len(page)
touched = False
#for s, e in self.Intervals:
# touched = True
if not touched:
# expand
Match = [(m.start(), m.end()) for m in re.finditer(rb'([a-zA-Z0-9\-\\.%*:/? ]\x00){4,}', page)]
for s, e in Match:
for i in range(e-s):
#print i
self.penMap[pageStart + s + i] = QtGui.QPen(QtGui.QColor(255, 255, 0))
# get rid of '\x00'
string = page[s:e:2]
l = len(string)
# copy string that has no zeros
page[s:s + l] = string
# fill with zeros the remaining space
page[s + l: s + 2*l] = b'\x00'*l
return page
### todo: other way to highlight widechar, should test and see which one is faster
"""
def _changeText(self, page, page_start, I):
page_end = page_start + len(page)
for obj in I:
if obj['s'] >= page_start and obj['e'] <= page_end:
page[obj['s']-page_start:obj['e']-page_start] = obj['text']
def _expand(self, page, off, start, end):
I = []
start = start - off
end = end - off
i = start
while i < end:
if i+1 < end:
if page[i+1] == 0 and self.isText(chr(page[i])):
k = 0
for j in xrange(i, end, 2):
if j + 1 < end:
if self.isText(chr(page[j])) and page[j+1] == 0:
k += 1
else:
break
if k > 4:
if i+k*2 <= end:
obj = {}
obj['s'] = off + i + 1
obj['e'] = off + i + k * 2
for idx, j in enumerate(range(i+1, i + k*2)):
if j > i + k:
page[j] = 0
#self.penMap[j] = self.greenPen
elif j+idx+1 < end:
page[j] = page[j + idx + 1]
self.penMap[off + j] = self.greenPen
obj['text'] = page[i+1:i+k*2]
I.append(obj)
self.penMap[off + i] = self.greenPen
i += k*2
i = i + 1
return I
pass
def highliteWidechar(self, page):
off = self.dataModel.getOffset()
page_end = off + len(page)
touched = False
#print '-------'
for idx, iv in enumerate(self.Intervals):
#print 'acum aici'
# in interval
s, e, I = iv
#print s ,e
#print page_end
page_start = off
if off >= s:
touched = True
if page_end <= e:
self._changeText(page, off, I)
else:
if off <= e:
I2 = self._expand(page, off, e, page_end)
for obj in I2:
I.append(obj)
e = page_end
self.Intervals[idx] = (s, e, I)
else:
                    # we are several pages further down
touched = False
else:
if page_end <= e and page_end >= s:
# scrolled up
I2 = self._expand(page, off, page_start, s)
for obj in I2:
I.append(obj)
s = page_start
self.Intervals[idx] = (s, e, I)
touched = True
else:
# out of this interval
touched = False
if not touched or touched:
#print 'aici'
self.Intervals.append((off, page_end, self._expand(page, off, off, page_end)))
"""
class RangePen(PageDecorator):
def __init__(self, decorated, a, b, pen, ignoreHighlights=True):
super(RangePen, self).__init__(decorated)
self.dataModel = decorated.getDataModel()
self.decorated = decorated
self.a = a
self.b = b
self.pen = pen
self.already = False
self.ignoreHighlights = ignoreHighlights
def decorate(self, pageOffset=None):
self.page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
if not self.already:
self.addPenInterval(self.a, self.b, self.ignoreHighlights, self.pen)
self.already = True
return self.page
|
gpl-2.0
| -3,535,276,325,798,863,400 | 31.992823 | 171 | 0.516279 | false | 4.011344 | false | false | false |
google/makani
|
avionics/firmware/network/generate_net_send.py
|
1
|
4158
|
#!/usr/bin/python
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate NetSendAioXXX functions to pack & send messages on the TMS570."""
import sys
import textwrap
import gflags
from makani.avionics.common import pack_avionics_messages
from makani.avionics.network import message_type
from makani.lib.python import c_helpers
gflags.DEFINE_string('source_file', None,
'Full path to output source file.',
short_name='s')
gflags.DEFINE_string('header_file', None,
'Full path to output header file.',
short_name='h')
FLAGS = gflags.FLAGS
message_type_helper = c_helpers.EnumHelper('MessageType', message_type)
def _GenStructName(message_type_name):
"""Generate C message structure for a given message type."""
return message_type_helper.ShortName(message_type_name) + 'Message'
def _GenPackFunctionName(message_type_name):
"""Generate C pack function name for a given message type."""
return 'Pack' + _GenStructName(message_type_name)
def _GenPackedSizeMacroName(message_type_name):
"""Generate packed size macro name for a given message type."""
return 'PACK_' + _GenStructName(message_type_name).upper() + '_SIZE'
def _WriteNetSendAioFunction(message_type_name, f):
"""Write NetSendAio<MessageName>() function."""
struct_name = _GenStructName(message_type_name)
size_macro = _GenPackedSizeMacroName(message_type_name)
pack_func = _GenPackFunctionName(message_type_name)
pack_cast = 'PackAioMessageFunction'
f.write(textwrap.dedent('''
COMPILE_ASSERT({1} <= MAX_AIO_PAYLOAD_SIZE,
{1}_must_fit_within_MAX_AIO_PAYLOAD_SIZE);
bool NetSendAio{0}(const {0} *msg) {{
return NetSendAioPacked({2}, ({3}){4}, msg);
}}
'''.format(struct_name, size_macro, message_type_name, pack_cast,
pack_func)))
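
# For a hypothetical message type kMessageTypeMotorStatus, the emitted C is
# roughly:
#   COMPILE_ASSERT(PACK_MOTORSTATUSMESSAGE_SIZE <= MAX_AIO_PAYLOAD_SIZE, ...);
#   bool NetSendAioMotorStatusMessage(const MotorStatusMessage *msg) {
#     return NetSendAioPacked(kMessageTypeMotorStatus,
#                             (PackAioMessageFunction)PackMotorStatusMessage,
#                             msg);
#   }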
def _WriteNetSendAioPrototype(message_type_name, f):
"""Write NetSendAio<MessageName>() prototype."""
struct_name = _GenStructName(message_type_name)
f.write('bool NetSendAio{0}(const {0} *msg);\n'.format(struct_name))
def _WriteSource(messages, f):
"""Write source file."""
f.write(textwrap.dedent('''
#include "avionics/firmware/network/net_send.h"
#include <stdbool.h>
#include "avionics/common/avionics_messages.h"
#include "avionics/common/pack_avionics_messages.h"
#include "avionics/firmware/network/net.h"
#include "avionics/network/message_type.h"
#include "common/macros.h"
''')[1:])
for m in messages:
_WriteNetSendAioFunction(m, f)
def _WriteHeader(messages, f):
"""Write header file."""
guard = 'AVIONICS_FIRMWARE_NETWORK_NET_SEND_H_'
f.write(textwrap.dedent('''
#ifndef {0}
#define {0}
#include <stdbool.h>
#include "avionics/common/avionics_messages.h"
#include "avionics/firmware/network/net.h"
'''.format(guard))[1:])
for m in messages:
_WriteNetSendAioPrototype(m, f)
f.write(textwrap.dedent('''
#endif // {0}'''.format(guard)))
def main(argv):
"""Entry point."""
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
print '{}\nUsage: {} ARGS\n{}'.format(e, sys.argv[0], FLAGS)
sys.exit(1)
messages = [m for (m, _) in message_type_helper
if _GenPackFunctionName(m) in pack_avionics_messages.__dict__]
if FLAGS.header_file:
with open(FLAGS.header_file, 'w') as f:
_WriteHeader(messages, f)
if FLAGS.source_file:
with open(FLAGS.source_file, 'w') as f:
_WriteSource(messages, f)
if __name__ == '__main__':
main(sys.argv)
|
apache-2.0
| -7,501,108,895,089,029,000 | 30.984615 | 77 | 0.672198 | false | 3.361358 | false | false | false |
thecotne/smart_less_build
|
executer.py
|
1
|
10324
|
import sublime, sublime_plugin
import os, sys
import threading
import subprocess
import functools
import time
class ProcessListener(object):
def on_data(self, proc, data):
pass
def on_finished(self, proc):
pass
# Encapsulates subprocess.Popen, forwarding stdout to a supplied
# ProcessListener (on a separate thread)
class AsyncProcess(object):
def __init__(self, cmd, shell_cmd, env, listener,
# "path" is an option in build systems
path="",
# "shell" is an options in build systems
shell=False):
if not shell_cmd and not cmd:
raise ValueError("shell_cmd or cmd is required")
if shell_cmd and not isinstance(shell_cmd, str):
raise ValueError("shell_cmd must be a string")
self.listener = listener
self.killed = False
self.start_time = time.time()
# Hide the console window on Windows
startupinfo = None
if os.name == "nt":
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# Set temporary PATH to locate executable in cmd
if path:
old_path = os.environ["PATH"]
# The user decides in the build system whether he wants to append $PATH
# or tuck it at the front: "$PATH;C:\\new\\path", "C:\\new\\path;$PATH"
os.environ["PATH"] = os.path.expandvars(path)
proc_env = os.environ.copy()
proc_env.update(env)
for k, v in proc_env.items():
proc_env[k] = os.path.expandvars(v)
if shell_cmd and sys.platform == "win32":
# Use shell=True on Windows, so shell_cmd is passed through with the correct escaping
self.proc = subprocess.Popen(shell_cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=True)
elif shell_cmd and sys.platform == "darwin":
# Use a login shell on OSX, otherwise the users expected env vars won't be setup
self.proc = subprocess.Popen(["/bin/bash", "-l", "-c", shell_cmd], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=False)
elif shell_cmd and sys.platform == "linux":
# Explicitly use /bin/bash on Linux, to keep Linux and OSX as
# similar as possible. A login shell is explicitly not used for
# linux, as it's not required
self.proc = subprocess.Popen(["/bin/bash", "-c", shell_cmd], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=False)
else:
# Old style build system, just do what it asks
self.proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=shell)
if path:
os.environ["PATH"] = old_path
if self.proc.stdout:
threading.Thread(target=self.read_stdout).start()
if self.proc.stderr:
threading.Thread(target=self.read_stderr).start()
def kill(self):
if not self.killed:
self.killed = True
if sys.platform == "win32":
# terminate would not kill process opened by the shell cmd.exe, it will only kill
# cmd.exe leaving the child running
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.Popen("taskkill /PID " + str(self.proc.pid), startupinfo=startupinfo)
else:
self.proc.terminate()
self.listener = None
def poll(self):
        return self.proc.poll() is None
def exit_code(self):
return self.proc.poll()
def read_stdout(self):
while True:
data = os.read(self.proc.stdout.fileno(), 2**15)
if len(data) > 0:
if self.listener:
self.listener.on_data(self, data)
else:
self.proc.stdout.close()
if self.listener:
self.listener.on_finished(self)
break
def read_stderr(self):
while True:
data = os.read(self.proc.stderr.fileno(), 2**15)
if len(data) > 0:
if self.listener:
self.listener.on_data(self, data)
else:
self.proc.stderr.close()
break
class ExecuterCommand(sublime_plugin.WindowCommand, ProcessListener):
def run(self, cmd = None, shell_cmd = None, file_regex = "", line_regex = "", working_dir = "",
encoding = "utf-8", env = {}, quiet = False, kill = False,
word_wrap = True, syntax = "Packages/Text/Plain text.tmLanguage",
# Catches "path" and "shell"
**kwargs):
if kill:
if self.proc:
self.proc.kill()
self.proc = None
self.append_string(None, "[Cancelled]")
return
if not hasattr(self, 'output_view'):
# Try not to call get_output_panel until the regexes are assigned
self.output_view = self.window.create_output_panel("exec")
        # Default to the current file's directory if no working directory was given
if (working_dir == "" and self.window.active_view()
and self.window.active_view().file_name()):
working_dir = os.path.dirname(self.window.active_view().file_name())
self.output_view.settings().set("result_file_regex", file_regex)
self.output_view.settings().set("result_line_regex", line_regex)
self.output_view.settings().set("result_base_dir", working_dir)
self.output_view.settings().set("word_wrap", word_wrap)
self.output_view.settings().set("line_numbers", False)
self.output_view.settings().set("gutter", False)
self.output_view.settings().set("scroll_past_end", False)
self.output_view.assign_syntax(syntax)
# Call create_output_panel a second time after assigning the above
# settings, so that it'll be picked up as a result buffer
self.window.create_output_panel("exec")
self.encoding = encoding
self.quiet = quiet
self.proc = None
if not self.quiet:
if shell_cmd:
print("Running " + shell_cmd)
else:
print("Running " + " ".join(cmd))
sublime.status_message("Building")
# show_panel_on_build = sublime.load_settings("Preferences.sublime-settings").get("show_panel_on_build", True)
# if show_panel_on_build:
# self.window.run_command("show_panel", {"panel": "output.exec"})
merged_env = env.copy()
if self.window.active_view():
user_env = self.window.active_view().settings().get('build_env')
if user_env:
merged_env.update(user_env)
# Change to the working dir, rather than spawning the process with it,
# so that emitted working dir relative path names make sense
if working_dir != "":
os.chdir(working_dir)
self.debug_text = ""
if shell_cmd:
self.debug_text += "[shell_cmd: " + shell_cmd + "]\n"
else:
self.debug_text += "[cmd: " + str(cmd) + "]\n"
self.debug_text += "[dir: " + str(os.getcwd()) + "]\n"
# if "PATH" in merged_env:
# self.debug_text += "[path: " + str(merged_env["PATH"]) + "]"
# else:
# self.debug_text += "[path: " + str(os.environ["PATH"]) + "]"
try:
# Forward kwargs to AsyncProcess
self.proc = AsyncProcess(cmd, shell_cmd, merged_env, self, **kwargs)
except Exception as e:
self.append_string(None, str(e) + "\n")
self.append_string(None, self.debug_text + "\n")
if not self.quiet:
self.append_string(None, "[Finished]")
def is_enabled(self, kill = False):
if kill:
return hasattr(self, 'proc') and self.proc and self.proc.poll()
else:
return True
def append_data(self, proc, data):
if proc != self.proc:
# a second call to exec has been made before the first one
# finished, ignore it instead of intermingling the output.
if proc:
proc.kill()
return
        try:
            characters = data.decode(self.encoding)
        except:
            characters = "[Decode error - output not " + self.encoding + "]\n"
            proc = None
        # Normalize newlines, Sublime Text always uses a single \n separator
        # in memory.
        characters = characters.replace('\r\n', '\n').replace('\r', '\n')
        self.output_view.run_command('append', {'characters': characters, 'force': True, 'scroll_to_end': True})
    def append_string(self, proc, text):
        self.append_data(proc, text.encode(self.encoding))
    def finish(self, proc):
        # compute elapsed unconditionally: the status message near the end of
        # this method needs it even when running in quiet mode
        elapsed = time.time() - proc.start_time
        if not self.quiet:
            exit_code = proc.exit_code()
            if exit_code == 0 or exit_code is None:
# self.append_string(proc,
# ("[Finished in %.1fs]" % (elapsed)))
pass
else:
self.append_string(proc, ("[Finished in %.1fs with exit code %d]\n"
% (elapsed, exit_code)))
self.append_string(proc, self.debug_text)
self.window.run_command("show_panel", {"panel": "output.exec"})
if proc != self.proc:
return
errs = self.output_view.find_all_results()
if len(errs) == 0:
# sublime.status_message("Build finished")
sublime.status_message(("Build Finished in %.1fs" % (elapsed)))
# else:
# sublime.status_message(("Build finished with %d errors") % len(errs))
def on_data(self, proc, data):
sublime.set_timeout(functools.partial(self.append_data, proc, data), 0)
def on_finished(self, proc):
sublime.set_timeout(functools.partial(self.finish, proc), 0)
|
gpl-3.0
| -967,074,813,617,226,400 | 38.106061 | 118 | 0.565866 | false | 4 | false | false | false |
google/dnae
|
lib/utils/utils.py
|
1
|
6860
|
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility modules - Misc utility methods."""
import calendar
import datetime
import logging
import re
import time
class TextUtils(object):
"""Provides text/string related utility methods."""
def __init__(self):
pass
@classmethod
def toascii(cls, input_string):
temp_string = input_string
return str(re.sub(r"[^\x00-\x7F]+", "?", temp_string))
@classmethod
def removecommas(cls, input_string):
return cls.__replace(input_string, ",", " ")
@classmethod
def removequotes(cls, input_string):
return cls.__replace(input_string, '"', "")
@classmethod
def removenewlines(cls, input_string):
temp_string = cls.__replace(input_string, "\r", "")
temp_string = cls.__replace(temp_string, "\n", "")
return temp_string
@classmethod
def timestamp(cls, mode=None):
if mode == "short":
out = time.strftime("%Y%m%d")
else:
out = time.strftime("%Y%m%d_%H%M%S")
return out
@classmethod
def toidentifier(cls, input_string):
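    # e.g. toidentifier("Net & Spend (EU)") returns "Net_n_Spend__EU_"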
out = cls.toascii(input_string)
out = (out.replace(" ", "_").replace("&", "n").replace("@", "_")
.replace("#", "hash").replace("%", "x100").replace("'", "_")
.replace("+", "plus").replace("-", "minus").replace("*", "x")
.replace("/", "div").replace(".", "point").replace(",", "comma")
.replace("(", "_").replace(")", "_").replace("[", "_")
.replace("]", "_").replace("{", "_").replace("}", "_"))
return out
@classmethod
def strlist2str(cls, string_list, sep):
out = ""
for item in string_list:
out += item
out += sep
return out[:-1]
@classmethod
def __replace(cls, input_string, old, new):
replacefcn = lambda s: s.replace(old, new)
if isinstance(input_string, str):
out = replacefcn(input_string)
elif isinstance(input_string, list):
out = list()
for item in input_string:
if isinstance(item, str):
out.append(replacefcn(item))
else:
out.append(item)
else:
out = None
return out
class DateUtils(object):
"""Provides Date related utility methods."""
today = datetime.datetime.today()
curryear = today.year
one_day = datetime.timedelta(days=1)
quartermap = dict()
quartermap[1] = [
datetime.datetime(curryear, 1, 1),
datetime.datetime(curryear, 3, 31, 23, 59, 59, 999999)
]
quartermap[2] = [
datetime.datetime(curryear, 4, 1),
datetime.datetime(curryear, 6, 30, 23, 59, 59, 999999)
]
quartermap[3] = [
datetime.datetime(curryear, 7, 1),
datetime.datetime(curryear, 9, 30, 23, 59, 59, 999999)
]
quartermap[4] = [
datetime.datetime(curryear, 10, 1),
datetime.datetime(curryear, 12, 31, 23, 59, 59, 999999)
]
def __init__(self):
pass
@classmethod
def usec2date(cls, usec):
d0 = datetime.datetime(year=1970, month=1, day=1) # unix time reference
delta = datetime.timedelta(microseconds=usec)
return d0 + delta
@classmethod
def quarterstartdate(cls):
    # floor division keeps the quartermap key an integer under Python 3
    curr_quarter = (DateUtils.today.month - 1) // 3 + 1
quarter_start_date = cls.quartermap[curr_quarter][0]
return quarter_start_date
@classmethod
def quarterenddate(cls):
    # floor division keeps the quartermap key an integer under Python 3
    curr_quarter = (DateUtils.today.month - 1) // 3 + 1
quarter_end_date = cls.quartermap[curr_quarter][1]
return quarter_end_date
@classmethod
def lastdayofmonth(cls, month):
return calendar.monthrange(cls.curryear, month)[1]
@classmethod
def dbmdate2sfdate(cls, datestr):
return datetime.datetime.strptime(datestr, "%Y/%m/%d").strftime("%Y-%m-%d")
@classmethod
def firstdayofquarter(cls):
t = datetime.datetime.today().toordinal()
if t in [
datetime.datetime(cls.curryear, 1, 1).toordinal(),
datetime.datetime(cls.curryear, 4, 1).toordinal(),
datetime.datetime(cls.curryear, 7, 1).toordinal(),
datetime.datetime(cls.curryear, 10, 1).toordinal()
]:
return True
else:
return False
@classmethod
def firstdayofyear(cls):
t = datetime.datetime.today().toordinal()
if t == datetime.datetime(cls.curryear, 1, 1).toordinal():
return True
else:
return False
@classmethod
def quarterdays(cls):
return cls.quarterenddate().toordinal() - cls.quarterstartdate().toordinal(
) + 1
@classmethod
def dayofquarter(cls, date=None, date_format=None):
if not date:
date = cls.today
else:
date = datetime.datetime.strptime(date, date_format)
q2 = (datetime.datetime.strptime("4/1/{0:4d}".format(date.year),
"%m/%d/%Y")).timetuple().tm_yday
q3 = (datetime.datetime.strptime("7/1/{0:4d}".format(date.year),
"%m/%d/%Y")).timetuple().tm_yday
q4 = (datetime.datetime.strptime("10/1/{0:4d}".format(date.year),
"%m/%d/%Y")).timetuple().tm_yday
cur_day = date.timetuple().tm_yday
if date.month < 4:
return cur_day
elif date.month < 7:
return cur_day - q2 + 1
elif date.month < 10:
return cur_day - q3 + 1
else:
return cur_day - q4 + 1
class SelfIncreasingIndex(object):
"""Provides utility methods to create and use a self-increasing index."""
def __init__(self):
self.__value = 0
def __call__(self, *args, **kwargs):
val = self.__value
self.__value += 1
return val
def start(self, init_value=0):
self.__value = init_value
return self.__value
def nextval(self):
self.__value += 1
return self.__value
# Decorators
def retry(some_function, *args, **kwargs):
_MAX_RETRY = 5
def wrapper(*args, **kwargs):
retval = None
retry_attempts = 0
done = False
while not done:
try:
retval = some_function(*args, **kwargs)
done = True
# pylint: disable=broad-except
except Exception as error:
retry_attempts += 1
if retry_attempts <= _MAX_RETRY:
seconds = 2 ** retry_attempts
logging.warning("Encountered an error - %s -, "
"retrying in %d seconds...", str(error), seconds)
time.sleep(seconds)
else:
raise error
# pylint: enable=broad-except
return retval
return wrapper
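
# A usage sketch for the retry decorator (illustrative; "flaky_call" is a
# hypothetical function, not part of this module):
#
#   @retry
#   def flaky_call():
#       ...  # may raise transient errors
#
# Each failed attempt sleeps 2 ** attempt seconds (2, 4, 8, ...) and the
# call is retried up to _MAX_RETRY times before the error is re-raised.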
|
apache-2.0
| 8,431,717,631,091,964,000 | 26.886179 | 79 | 0.61035 | false | 3.550725 | false | false | false |
ikreymer/pywb-ia
|
tools/iaitemhandler.py
|
1
|
3821
|
from pywb.webapp.handlers import WBHandler
from pywb.cdx.zipnum import ZipNumCluster
from pywb.cdx.cdxserver import CDXServer
import os
import requests
import shutil
#=============================================================================
class ItemHandler(WBHandler):
def __init__(self, query_handler, config=None):
self.item_cdx_root = config.get('index_paths')
self.download_prefix = config.get('archive_paths')
if not os.path.isdir(self.item_cdx_root):
os.makedirs(self.item_cdx_root)
super(ItemHandler, self).__init__(query_handler, config)
def handle_request(self, wbrequest):
self.load_item_files(wbrequest)
return super(ItemHandler, self).handle_request(wbrequest)
def load_item_files(self, wbrequest):
item = wbrequest.coll
idx_file = os.path.join(self.item_cdx_root, item + '.cdx.idx')
cdx_file = os.path.join(self.item_cdx_root, item + '.cdx.gz')
# first, try to download idx and use remote cdx
if not os.path.isfile(idx_file) and not os.path.isfile(cdx_file):
idx_url = self.download_prefix + item + '/' + item + '.cdx.idx'
try:
self.download_file(idx_url, idx_file)
self.number_idx(idx_file)
idx_found = True
except:
idx_found = False
if idx_found:
return
# try to download cdx file if no idx
cdx_url = self.download_prefix + item + '/' + item + '.cdx.gz'
try:
self.download_file(cdx_url, cdx_file)
except:
raise
def download_file(self, url, filename):
""" Download cdx or idx file locally
"""
r = requests.get(url, stream=True)
r.raise_for_status()
with open(filename, 'wb') as f:
for chunk in r.iter_content():
if chunk:
f.write(chunk)
f.flush()
def number_idx(self, filename):
""" If idx doesn't have the last line number column, add it
to allow for much better search ops
"""
with open(filename) as fh:
firstline = fh.readline()
parts = firstline.split('\t')
# only add if there are 4 parts
if len(parts) != 4:
return
count = 1
def writeline(fho, line, count):
fho.write(line.rstrip() + '\t' + str(count) + '\n')
with open(filename + '.tmp', 'w+b') as fho:
writeline(fho, firstline, count)
count += 1
for line in fh:
writeline(fho, line, count)
count += 1
shutil.move(filename + '.tmp', filename)
#=============================================================================
class ItemCDXServer(CDXServer):
def _create_cdx_sources(self, paths, config):
src = ItemCDXIndex(paths, config)
self.sources = [src]
#=============================================================================
class ItemCDXIndex(ZipNumCluster):
def __init__(self, summary, config):
self.root_path = summary
super(ItemCDXIndex, self).__init__(summary, config)
self.prefix = config.get('archive_paths')
def resolve(part, query):
coll = query.params.get('coll')
local_cdx = os.path.join(self.root_path, coll + '.cdx.gz')
remote_cdx = self.prefix + coll + '/' + part
return [local_cdx, remote_cdx]
self.loc_resolver = resolve
def load_cdx(self, query):
coll = query.params.get('coll')
full = os.path.join(self.root_path, coll + '.cdx.idx')
return self._do_load_cdx(full, query)
|
mit
| 1,659,556,733,371,929,600 | 33.423423 | 78 | 0.516619 | false | 3.910952 | true | false | false |
antivanov/chief-lunch-officer
|
find_cafe.py
|
1
|
8229
|
# Automatically fetches menus for today, grades predefined cafes and based on
# additional information (weather, cafe of choice yesterday) gives recommendations
# where to go for lunch.
# If there are problems with encoding set Python encoding correctly by executing:
# set PYTHONIOENCODING=utf-8
from chief_lunch_officer import ChiefLunchOfficer, WeatherOpinion, FoodTaste
from constants import TEMPERATURE, PRECIPITATION_CHANCE, PRECIPITATION_AMOUNT, WIND
from constants import NEPALESE, HIMA_SALI, DYLAN_MILK, FACTORY_SALMISAARI, PIHKA, ANTELL, SODEXO_ACQUA, SODEXO_EXPLORER
from preferences import FOOD_PREFERENCES
from cafes import CAFES
from decorators import get_ignore_errors_decorator
from pathlib import Path
from datetime import date, datetime, timedelta
from copy import deepcopy
import urllib.request
import json
import re
EmptyMenuOnError = get_ignore_errors_decorator(default_value='No menu. Data feed format for the cafe changed?')
HIMA_SALI_URL = 'http://www.himasali.com/p/lounaslista.html'
DYLAN_MILK_URL = 'http://dylan.fi/milk/'
PIHKA_URL = 'http://ruoholahti.pihka.fi/en/'
FACTORY_SALMISAARI_URL = 'http://www.ravintolafactory.com/ravintolat/helsinki-salmisaari/'
ANTELL_URL = 'http://www.antell.fi/lounaslistat/lounaslista.html?owner=146'
YLE_WEATHER_FORECAST_URL = 'http://yle.fi/saa/resources/ajax/saa-api/hourly-forecast.action?id=642554'
SODEXO_ACQUA_URL = 'http://www.sodexo.fi/carte/load/html/30/%s/day'
SODEXO_EXPLORER_URL = 'http://www.sodexo.fi/carte/load/html/31/%s/day'
def make_readable(content_with_html_tags, insert_new_lines=True, collapse_whitespace=False):
content_with_html_tags = re.sub('<br.*?>', '\n' if insert_new_lines else '', content_with_html_tags)
content_with_html_tags = re.sub('<.*?>', '', content_with_html_tags)
content_with_html_tags = re.sub('[ \t]+', ' ', content_with_html_tags)
content_with_html_tags = re.sub('\n+', '\n', content_with_html_tags)
if collapse_whitespace:
content_with_html_tags = re.sub('\s+', ' ', content_with_html_tags)
content_with_html_tags = re.sub("(.{80})", "\\1\n", content_with_html_tags, 0, re.DOTALL)
    content_with_html_tags = content_with_html_tags.replace('&amp;', '&').replace('&nbsp;', '')
return content_with_html_tags.encode('ascii', 'ignore').decode('ascii')
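# Illustrative example: make_readable("<p>Fish &amp; Chips<br>9,90</p>")
# returns "Fish & Chips\n9,90" - tags are stripped, <br> becomes a newline
# and HTML entities are decoded before the ascii round-trip.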
def get(url):
response = urllib.request.urlopen(url)
charset = response.headers.get_content_charset() if response.headers.get_content_charset() is not None else 'utf-8'
return response.read().decode(charset)
def get_and_find_all(url, regex):
html = get(url)
return re.findall(regex, html, re.MULTILINE | re.DOTALL)
def find_menu(url, date, regex, index=0):
weekday = date.weekday()
if (weekday > 4): #Saturday or Sunday
return 'Weekend: no menu'
found = get_and_find_all(url, regex)
if (len(found) == 0):
return 'No menu'
else:
return found[index]
@EmptyMenuOnError
def get_sodexo_explorer_menu(date):
menu_url = SODEXO_EXPLORER_URL % (date.strftime('%Y-%m-%d'))
menu = find_menu(menu_url, date, '(.*)')
menu = json.loads(menu)['foods']
return menu
@EmptyMenuOnError
def get_sodexo_acqua_menu(date):
menu_url = SODEXO_ACQUA_URL % (date.strftime('%Y-%m-%d'))
menu = find_menu(menu_url, date, '(.*)')
menu = json.loads(menu)['foods']
return menu
@EmptyMenuOnError
def get_antell_menu(date):
weekday = date.weekday()
return find_menu(ANTELL_URL, date, r'<h2[^>]+>(.*?)<img', weekday)
@EmptyMenuOnError
def get_hima_sali_menu(date):
date_label = '%d\\.%d\\.' % (date.day, date.month)
return find_menu(HIMA_SALI_URL, date, r'%s(.*?Wok.*?[\d\.]+)' % (date_label), -1)
@EmptyMenuOnError
def get_dylan_milk_menu(date):
return find_menu(DYLAN_MILK_URL, date, r'<div class="fbf_desc">(.*?)</div>')
@EmptyMenuOnError
def get_pihka_menu(date):
weekday = date.weekday()
found = get_and_find_all(PIHKA_URL, r'<div class="menu\-day.*?<ul>(.*?)</div>')
return found[weekday]
@EmptyMenuOnError
def get_factory_salmisaari_menu(date):
date_label = date.strftime('%d.%m.%Y')
found = get_and_find_all(FACTORY_SALMISAARI_URL, r'%s</h3>(.*?)</p>' % (date_label))
return found[0]
def get_todays_weather():
weather_response = get(YLE_WEATHER_FORECAST_URL)
forecast = json.loads(weather_response)['weatherInfos'][0]
return {
TEMPERATURE: forecast['temperature'],
PRECIPITATION_CHANCE: forecast['probabilityPrecipitation'],
PRECIPITATION_AMOUNT: forecast['precipitation1h'],
WIND: forecast['windSpeedMs']
}
def week_number(date):
return date.isocalendar()[1]
def parse_date(date_str):
return datetime.strptime(date_str, '%d.%m.%Y')
def get_current_week_history(today):
history_path = Path('history.json')
if not history_path.exists():
with history_path.open('w') as f:
f.write('{}')
with history_path.open('r') as f:
history = json.loads(f.read())
current_week = week_number(today)
def is_date_this_week_before_today(d):
return (current_week == week_number(d)
and d.date() < today)
current_week_history = {(k, v) for (k, v) in history.items() if is_date_this_week_before_today(parse_date(k))}
return dict(current_week_history)
def ordered_cafes(history):
sorted_dates = sorted(history)
cafes = []
for cafe_date in sorted_dates:
cafes.append(history[cafe_date])
return cafes
def store_history(history):
history_path = Path('history.json')
with history_path.open('w') as f:
f.write(json.dumps(history, sort_keys=True))
def update_history(history, today, todays_cafe):
history[today.strftime('%d.%m.%Y')] = todays_cafe
store_history(history)
today = date.today()
#today = today + timedelta(days=2)
print('Today %s\n' % today.strftime('%d.%m.%Y'))
sodexo_acqua_menu = get_sodexo_acqua_menu(today)
print('\nSodexo Acqua:\n\n%s' % make_readable(sodexo_acqua_menu, collapse_whitespace=True))
sodexo_explorer_menu = get_sodexo_explorer_menu(today)
print('\nSodexo Explorer:\n\n%s' % make_readable(sodexo_explorer_menu, collapse_whitespace=True))
antell_menu = get_antell_menu(today)
print('\nAntell:\n\n%s' % make_readable(antell_menu, collapse_whitespace=True))
hima_sali_menu = get_hima_sali_menu(today)
print('\nHima & Sali:\n\n%s' % make_readable(hima_sali_menu, insert_new_lines=False))
dylan_milk_menu = get_dylan_milk_menu(today)
print('\nDylan Milk:\n\n%s' % make_readable(dylan_milk_menu))
pihka_menu = get_pihka_menu(today)
print('\nPihka:\n\n%s' % make_readable(pihka_menu, collapse_whitespace=True))
factory_salmisaari_menu = get_factory_salmisaari_menu(today)
print('\nFactory Salmisaari:\n\n%s' % make_readable(factory_salmisaari_menu, insert_new_lines=False))
weather = get_todays_weather()
print('\nWeather:\n\n temperature %s C\n chance of precipitation %s percent\n precipitation amount %s mm\n wind %s m/s' % (weather[TEMPERATURE], weather[PRECIPITATION_CHANCE], weather[PRECIPITATION_AMOUNT], weather[WIND]))
lunch_history = get_current_week_history(today)
current_week_cafes = ordered_cafes(lunch_history)
print('\nLunch history for current week:\n\n %s' % ', '.join(current_week_cafes))
cafes = deepcopy(CAFES)
cafes[SODEXO_EXPLORER]['menu'] = sodexo_explorer_menu
cafes[SODEXO_ACQUA]['menu'] = sodexo_acqua_menu
cafes[ANTELL]['menu'] = antell_menu
cafes[HIMA_SALI]['menu'] = hima_sali_menu
cafes[DYLAN_MILK]['menu'] = dylan_milk_menu
cafes[PIHKA]['menu'] = pihka_menu
cafes[FACTORY_SALMISAARI]['menu'] = factory_salmisaari_menu
food_taste = FoodTaste().preferences(FOOD_PREFERENCES)
weather_opinion = WeatherOpinion().weather(weather)
clo = ChiefLunchOfficer(food_taste=food_taste, weather_opinion=weather_opinion)
clo.lunched(current_week_cafes).weather(weather).cafes(cafes).weekday(today.weekday())
todays_cafes = clo.decide()
todays_cafe = todays_cafes[0]
todays_cafe_address = CAFES[todays_cafe]['address']
update_history(lunch_history, today, todays_cafe)
print('\nRecommendation:\n\n %s, %s' % (todays_cafe, todays_cafe_address))
formatted_cafes = ', '.join(todays_cafes[0:5]) + '\n' + ', '.join(todays_cafes[5:])
print('\nAll lunch in preferred order:\n\n %s' % (formatted_cafes))
|
apache-2.0
| 6,199,370,958,018,314,000 | 41.205128 | 222 | 0.697898 | false | 2.774444 | false | false | false |
unicefuganda/edtrac
|
edtrac_project/rapidsms_xforms_src/pavement.py
|
1
|
2360
|
# -*- Import: -*-
from paver.easy import *
from paver.setuputils import setup
from setuptools import find_packages
try:
# Optional tasks, only needed for development
# -*- Optional import: -*-
from github.tools.task import *
import paver.doctools
import paver.virtual
import paver.misctasks
ALL_TASKS_LOADED = True
except ImportError, e:
info("some tasks could not not be imported.")
debug(str(e))
ALL_TASKS_LOADED = False
version = '0.1'
classifiers = [
# Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
"Development Status :: 1 - Planning",
]
install_requires = [
# -*- Install requires: -*-
'setuptools',
]
entry_points="""
# -*- Entry points: -*-
"""
# compatible with distutils of python 2.3+ or later
setup(
name='rapidsms-xforms',
version=version,
description='Provides an interactive form builder and xform compatibility for RapidSMS.',
long_description=open('README.rst', 'r').read(),
classifiers=classifiers,
keywords='rapidsms xforms',
author='Nic Pottier',
author_email='[email protected]',
url='',
license='BSD',
packages = find_packages(exclude=['bootstrap', 'pavement',]),
include_package_data=True,
test_suite='nose.collector',
zip_safe=False,
install_requires=install_requires,
entry_points=entry_points,
)
options(
# -*- Paver options: -*-
minilib=Bunch(
extra_files=[
# -*- Minilib extra files: -*-
]
),
sphinx=Bunch(
docroot='docs',
builddir="_build",
sourcedir=""
),
virtualenv=Bunch(
packages_to_install=[
# -*- Virtualenv packages to install: -*-
'github-tools',
"nose",
"Sphinx>=0.6b1",
"pkginfo",
"virtualenv"],
dest_dir='./virtual-env/',
install_paver=True,
script_name='bootstrap.py',
paver_command_line=None
),
)
options.setup.package_data=paver.setuputils.find_package_data(
'rapidsms_xforms', package='rapidsms_xforms', only_in_packages=False)
if ALL_TASKS_LOADED:
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
|
bsd-3-clause
| -8,912,364,466,401,702,000 | 24.934066 | 93 | 0.607203 | false | 3.676012 | false | false | false |
brendangregg/bcc
|
examples/networking/http_filter/http-parse-complete.py
|
1
|
11542
|
#!/usr/bin/python
#
# Bertrone Matteo - Polytechnic of Turin
# November 2015
#
# eBPF application that parses HTTP packets
# and extracts (and prints on screen) the URL
# contained in the GET/POST request.
#
# eBPF program http_filter is used as SOCKET_FILTER attached to eth0 interface.
# Only packets of type ip and tcp containing HTTP GET/POST are
# returned to userspace, others dropped
#
# Python script uses bcc BPF Compiler Collection by
# iovisor (https://github.com/iovisor/bcc) and prints on stdout the first
# line of the HTTP GET/POST request containing the url
from __future__ import print_function
from bcc import BPF
from sys import argv
import socket
import os
import binascii
import time
CLEANUP_N_PACKETS = 50 # cleanup every CLEANUP_N_PACKETS packets received
MAX_URL_STRING_LEN = 8192 # max url string len (usually 8K)
MAX_AGE_SECONDS = 30 # max age entry in bpf_sessions map
# convert a binary string into a string of hex chars
# helper function to print raw packet in hex
def toHex(s):
lst = ""
for ch in s:
hv = hex(ch).replace('0x', '')
if len(hv) == 1:
hv = '0' + hv
lst = lst + hv
return lst
# print str until CR+LF
def printUntilCRLF(s):
print(s.split(b'\r\n')[0].decode())
# cleanup function
def cleanup():
# get current time in seconds
current_time = int(time.time())
    # looking for leaves having:
    # timestamp == 0 --> update with current timestamp
# AGE > MAX_AGE_SECONDS --> delete item
for key, leaf in bpf_sessions.items():
try:
current_leaf = bpf_sessions[key]
# set timestamp if timestamp == 0
if (current_leaf.timestamp == 0):
bpf_sessions[key] = bpf_sessions.Leaf(current_time)
else:
# delete older entries
if (current_time - current_leaf.timestamp > MAX_AGE_SECONDS):
del bpf_sessions[key]
except:
print("cleanup exception.")
return
# args
def usage():
print("USAGE: %s [-i <if_name>]" % argv[0])
print("")
print("Try '%s -h' for more options." % argv[0])
exit()
# help
def help():
print("USAGE: %s [-i <if_name>]" % argv[0])
print("")
print("optional arguments:")
print(" -h print this help")
print(" -i if_name select interface if_name. Default is eth0")
print("")
print("examples:")
print(" http-parse # bind socket to eth0")
print(" http-parse -i wlan0 # bind socket to wlan0")
exit()
# arguments
interface = "eth0"
if len(argv) == 2:
if str(argv[1]) == '-h':
help()
else:
usage()
if len(argv) == 3:
if str(argv[1]) == '-i':
interface = argv[2]
else:
usage()
if len(argv) > 3:
usage()
print("binding socket to '%s'" % interface)
# initialize BPF - load source code from http-parse-complete.c
bpf = BPF(src_file="http-parse-complete.c", debug=0)
# load eBPF program http_filter of type SOCKET_FILTER into the kernel eBPF vm
# more info about eBPF program types
# http://man7.org/linux/man-pages/man2/bpf.2.html
function_http_filter = bpf.load_func("http_filter", BPF.SOCKET_FILTER)
# create raw socket, bind it to interface
# attach bpf program to socket created
BPF.attach_raw_socket(function_http_filter, interface)
# get file descriptor of the socket previously
# created inside BPF.attach_raw_socket
socket_fd = function_http_filter.sock
# create python socket object, from the file descriptor
sock = socket.fromfd(socket_fd, socket.PF_PACKET,
socket.SOCK_RAW, socket.IPPROTO_IP)
# set it as blocking socket
sock.setblocking(True)
# get pointer to bpf map of type hash
bpf_sessions = bpf.get_table("sessions")
# packets counter
packet_count = 0
# dictionary containing association
# <key(ipsrc,ipdst,portsrc,portdst),payload_string>.
# if url is not entirely contained in only one packet,
# save the first part of it in this local dict
# when I find \r\n in a next pkt, append and print the whole url
local_dictionary = {}
while 1:
# retrieve raw packet from socket
packet_str = os.read(socket_fd, 4096) # set packet length to max packet length on the interface
packet_count += 1
# DEBUG - print raw packet in hex format
# packet_hex = toHex(packet_str)
# print ("%s" % packet_hex)
# convert packet into bytearray
packet_bytearray = bytearray(packet_str)
# ethernet header length
ETH_HLEN = 14
# IP HEADER
# https://tools.ietf.org/html/rfc791
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |Version| IHL |Type of Service| Total Length |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# IHL : Internet Header Length is the length of the internet header
# value to multiply * 4 byte
# e.g. IHL = 5 ; IP Header Length = 5 * 4 byte = 20 byte
#
# Total length: This 16-bit field defines the entire packet size,
# including header and data, in bytes.
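    # Worked example (illustrative): a first header byte of 0x45 means
    # version = 4 and IHL = 5, i.e. a 5 * 4 = 20 byte header; the two bytes
    # at offsets 2..3 then hold the 16-bit total length, MSB first.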
# calculate packet total length
total_length = packet_bytearray[ETH_HLEN + 2] # load MSB
total_length = total_length << 8 # shift MSB
total_length = total_length + packet_bytearray[ETH_HLEN + 3] # add LSB
# calculate ip header length
ip_header_length = packet_bytearray[ETH_HLEN] # load Byte
ip_header_length = ip_header_length & 0x0F # mask bits 0..3
ip_header_length = ip_header_length << 2 # shift to obtain length
# retrieve ip source/dest
ip_src_str = packet_str[ETH_HLEN + 12: ETH_HLEN + 16] # ip source offset 12..15
ip_dst_str = packet_str[ETH_HLEN + 16:ETH_HLEN + 20] # ip dest offset 16..19
ip_src = int(toHex(ip_src_str), 16)
ip_dst = int(toHex(ip_dst_str), 16)
# TCP HEADER
# https://www.rfc-editor.org/rfc/rfc793.txt
# 12 13 14 15
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Data | |U|A|P|R|S|F| |
# | Offset| Reserved |R|C|S|S|Y|I| Window |
# | | |G|K|H|T|N|N| |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# Data Offset: This indicates where the data begins.
# The TCP header is an integral number of 32 bits long.
# value to multiply * 4 byte
# e.g. DataOffset = 5 ; TCP Header Length = 5 * 4 byte = 20 byte
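    # Worked example (illustrative): for an offset byte of 0x50,
    # 0x50 & 0xF0 = 0x50 and 0x50 >> 2 = 20, so a single right shift by 2
    # replaces the usual ">> 4" (to get 5) followed by "* 4".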
# calculate tcp header length
tcp_header_length = packet_bytearray[ETH_HLEN + ip_header_length + 12] # load Byte
tcp_header_length = tcp_header_length & 0xF0 # mask bit 4..7
tcp_header_length = tcp_header_length >> 2 # SHR 4 ; SHL 2 -> SHR 2
# retrieve port source/dest
port_src_str = packet_str[ETH_HLEN + ip_header_length:ETH_HLEN + ip_header_length + 2]
port_dst_str = packet_str[ETH_HLEN + ip_header_length + 2:ETH_HLEN + ip_header_length + 4]
port_src = int(toHex(port_src_str), 16)
port_dst = int(toHex(port_dst_str), 16)
# calculate payload offset
payload_offset = ETH_HLEN + ip_header_length + tcp_header_length
# payload_string contains only packet payload
payload_string = packet_str[(payload_offset):(len(packet_bytearray))]
# CR + LF (substring to find)
crlf = b'\r\n'
# current_Key contains ip source/dest and port source/map
# useful for direct bpf_sessions map access
current_Key = bpf_sessions.Key(ip_src, ip_dst, port_src, port_dst)
    # looking for an HTTP request (GET/POST/PUT/DELETE/HEAD, or an HTTP response)
if ((payload_string[:3] == b'GET') or (payload_string[:4] == b'POST')
or (payload_string[:4] == b'HTTP') or (payload_string[:3] == b'PUT')
or (payload_string[:6] == b'DELETE') or (payload_string[:4] == b'HEAD')):
# match: HTTP GET/POST packet found
if (crlf in payload_string):
# url entirely contained in first packet -> print it all
printUntilCRLF(payload_string)
# delete current_Key from bpf_sessions, url already printed.
# current session not useful anymore
try:
del bpf_sessions[current_Key]
except:
print("error during delete from bpf map ")
else:
# url NOT entirely contained in first packet
# not found \r\n in payload.
# save current part of the payload_string in dictionary
# <key(ips,ipd,ports,portd),payload_string>
local_dictionary[binascii.hexlify(current_Key)] = payload_string
else:
# NO match: HTTP GET/POST NOT found
        # check if the packet belongs to a session saved in bpf_sessions
        if (current_Key in bpf_sessions):
            # check if the packet belongs to a session saved in local_dictionary
# (local_dictionary maintains HTTP GET/POST url not
# printed yet because split in N packets)
if (binascii.hexlify(current_Key) in local_dictionary):
# first part of the HTTP GET/POST url is already present in
# local dictionary (prev_payload_string)
prev_payload_string = local_dictionary[binascii.hexlify(current_Key)]
# looking for CR+LF in current packet.
if (crlf in payload_string):
# last packet. containing last part of HTTP GET/POST
# url split in N packets. Append current payload
prev_payload_string += payload_string
# print HTTP GET/POST url
printUntilCRLF(prev_payload_string)
# clean bpf_sessions & local_dictionary
try:
del bpf_sessions[current_Key]
del local_dictionary[binascii.hexlify(current_Key)]
except:
print("error deleting from map or dictionary")
else:
# NOT last packet. Containing part of HTTP GET/POST url
# split in N packets.
# Append current payload
prev_payload_string += payload_string
# check if not size exceeding
# (usually HTTP GET/POST url < 8K )
if (len(prev_payload_string) > MAX_URL_STRING_LEN):
print("url too long")
try:
del bpf_sessions[current_Key]
del local_dictionary[binascii.hexlify(current_Key)]
except:
print("error deleting from map or dict")
# update dictionary
local_dictionary[binascii.hexlify(current_Key)] = prev_payload_string
else:
# first part of the HTTP GET/POST url is
# NOT present in local dictionary
# bpf_sessions contains invalid entry -> delete it
try:
del bpf_sessions[current_Key]
except:
print("error del bpf_session")
# check if dirty entry are present in bpf_sessions
if (((packet_count) % CLEANUP_N_PACKETS) == 0):
cleanup()
|
apache-2.0
| 4,341,704,507,636,344,300 | 36.718954 | 100 | 0.574164 | false | 3.809241 | false | false | false |
maximencia/python_traning
|
fixture/application.py
|
1
|
1783
|
# -*- coding: utf-8 -*-
__author__ = 'Maxim.Rumyantsev'
#from selenium.webdriver.firefox.webdriver import WebDriver
from selenium import webdriver
from fixture.session_f import SessionHelper
from fixture.group_f import GroupHelper
from fixture.contact_f import ContactHelper
class Application:
    # check that the fixture is still valid by querying the current url
def fixture_is_valid(self):
try:
self.wd.current_url
return True
except:
return False
def __init__(self,browser, base_url):
        # the browser parameter selects which browser to launch for the tests
if browser=="firefox":
self.wd = webdriver.Firefox()
elif browser=="chrome":
self.wd = webdriver.Chrome()
elif browser=="ie":
self.wd = webdriver.Ie()
else:
raise ValueError("Unrecognized browser %s" %browser)
#self.wd.implicitly_wait(1)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
self.base_url= base_url
    # fill a text field: name is the field name, send_keys_parameters is the text to type
def fill_text_field(self,name,send_keys_parameters):
wd = self.wd
if send_keys_parameters is not None:
wd.find_element_by_name(name).click()
wd.find_element_by_name(name).clear()
wd.find_element_by_name(name).send_keys(send_keys_parameters)
def open_home_page(self):
wd = self.wd
wd.get(self.base_url)
def destroy(self):
self.wd.quit()
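
# Minimal usage sketch (illustrative; the URL is a placeholder):
#
#   app = Application("chrome", "http://localhost/addressbook/")
#   app.open_home_page()
#   app.destroy()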
|
apache-2.0
| 7,473,063,612,288,819,000 | 27.052632 | 89 | 0.605072 | false | 3.285714 | false | false | false |
googleapis/googleapis-gen
|
google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/enums/types/location_group_radius_units.py
|
1
|
1225
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v8.enums',
marshal='google.ads.googleads.v8',
manifest={
'LocationGroupRadiusUnitsEnum',
},
)
class LocationGroupRadiusUnitsEnum(proto.Message):
r"""Container for enum describing unit of radius in location
group.
"""
class LocationGroupRadiusUnits(proto.Enum):
r"""The unit of radius distance in location group (e.g. MILES)"""
UNSPECIFIED = 0
UNKNOWN = 1
METERS = 2
MILES = 3
MILLI_MILES = 4
__all__ = tuple(sorted(__protobuf__.manifest))
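
# Example (illustrative): the enum values are addressed through the wrapper
# message, e.g. LocationGroupRadiusUnitsEnum.LocationGroupRadiusUnits.MILES
# compares equal to 3.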
|
apache-2.0
| 5,528,833,102,422,046,000 | 28.878049 | 74 | 0.685714 | false | 3.913738 | false | false | false |
ArcherSys/ArcherSys
|
Lib/test/test_dis.py
|
1
|
124487
|
# Minimal tests for dis module
from test.support import run_unittest, captured_stdout
from test.bytecode_helper import BytecodeTestCase
import difflib
import unittest
import sys
import dis
import io
import re
import types
import contextlib
def get_tb():
def _error():
try:
1 / 0
except Exception as e:
tb = e.__traceback__
return tb
tb = _error()
while tb.tb_next:
tb = tb.tb_next
return tb
TRACEBACK_CODE = get_tb().tb_frame.f_code
class _C:
def __init__(self, x):
self.x = x == 1
dis_c_instance_method = """\
%-4d 0 LOAD_FAST 1 (x)
3 LOAD_CONST 1 (1)
6 COMPARE_OP 2 (==)
9 LOAD_FAST 0 (self)
12 STORE_ATTR 0 (x)
15 LOAD_CONST 0 (None)
18 RETURN_VALUE
""" % (_C.__init__.__code__.co_firstlineno + 1,)
dis_c_instance_method_bytes = """\
0 LOAD_FAST 1 (1)
3 LOAD_CONST 1 (1)
6 COMPARE_OP 2 (==)
9 LOAD_FAST 0 (0)
12 STORE_ATTR 0 (0)
15 LOAD_CONST 0 (0)
18 RETURN_VALUE
"""
def _f(a):
print(a)
return 1
dis_f = """\
%-4d 0 LOAD_GLOBAL 0 (print)
3 LOAD_FAST 0 (a)
6 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
9 POP_TOP
%-4d 10 LOAD_CONST 1 (1)
13 RETURN_VALUE
""" % (_f.__code__.co_firstlineno + 1,
_f.__code__.co_firstlineno + 2)
dis_f_co_code = """\
0 LOAD_GLOBAL 0 (0)
3 LOAD_FAST 0 (0)
6 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
9 POP_TOP
10 LOAD_CONST 1 (1)
13 RETURN_VALUE
"""
def bug708901():
for res in range(1,
10):
pass
dis_bug708901 = """\
%-4d 0 SETUP_LOOP 23 (to 26)
3 LOAD_GLOBAL 0 (range)
6 LOAD_CONST 1 (1)
%-4d 9 LOAD_CONST 2 (10)
12 CALL_FUNCTION 2 (2 positional, 0 keyword pair)
15 GET_ITER
>> 16 FOR_ITER 6 (to 25)
19 STORE_FAST 0 (res)
%-4d 22 JUMP_ABSOLUTE 16
>> 25 POP_BLOCK
>> 26 LOAD_CONST 0 (None)
29 RETURN_VALUE
""" % (bug708901.__code__.co_firstlineno + 1,
bug708901.__code__.co_firstlineno + 2,
bug708901.__code__.co_firstlineno + 3)
def bug1333982(x=[]):
assert 0, ([s for s in x] +
1)
pass
dis_bug1333982 = """\
%3d 0 LOAD_CONST 1 (0)
3 POP_JUMP_IF_TRUE 35
6 LOAD_GLOBAL 0 (AssertionError)
9 LOAD_CONST 2 (<code object <listcomp> at 0x..., file "%s", line %d>)
12 LOAD_CONST 3 ('bug1333982.<locals>.<listcomp>')
15 MAKE_FUNCTION 0
18 LOAD_FAST 0 (x)
21 GET_ITER
22 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
%3d 25 LOAD_CONST 4 (1)
28 BINARY_ADD
29 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
32 RAISE_VARARGS 1
%3d >> 35 LOAD_CONST 0 (None)
38 RETURN_VALUE
""" % (bug1333982.__code__.co_firstlineno + 1,
__file__,
bug1333982.__code__.co_firstlineno + 1,
bug1333982.__code__.co_firstlineno + 2,
bug1333982.__code__.co_firstlineno + 3)
_BIG_LINENO_FORMAT = """\
%3d 0 LOAD_GLOBAL 0 (spam)
3 POP_TOP
4 LOAD_CONST 0 (None)
7 RETURN_VALUE
"""
dis_module_expected_results = """\
Disassembly of f:
4 0 LOAD_CONST 0 (None)
3 RETURN_VALUE
Disassembly of g:
5 0 LOAD_CONST 0 (None)
3 RETURN_VALUE
"""
expr_str = "x + 1"
dis_expr_str = """\
1 0 LOAD_NAME 0 (x)
3 LOAD_CONST 0 (1)
6 BINARY_ADD
7 RETURN_VALUE
"""
simple_stmt_str = "x = x + 1"
dis_simple_stmt_str = """\
1 0 LOAD_NAME 0 (x)
3 LOAD_CONST 0 (1)
6 BINARY_ADD
7 STORE_NAME 0 (x)
10 LOAD_CONST 1 (None)
13 RETURN_VALUE
"""
compound_stmt_str = """\
x = 0
while 1:
x += 1"""
# Trailing newline has been deliberately omitted
dis_compound_stmt_str = """\
1 0 LOAD_CONST 0 (0)
3 STORE_NAME 0 (x)
2 6 SETUP_LOOP 14 (to 23)
3 >> 9 LOAD_NAME 0 (x)
12 LOAD_CONST 1 (1)
15 INPLACE_ADD
16 STORE_NAME 0 (x)
19 JUMP_ABSOLUTE 9
22 POP_BLOCK
>> 23 LOAD_CONST 2 (None)
26 RETURN_VALUE
"""
dis_traceback = """\
%-4d 0 SETUP_EXCEPT 12 (to 15)
%-4d 3 LOAD_CONST 1 (1)
6 LOAD_CONST 2 (0)
--> 9 BINARY_TRUE_DIVIDE
10 POP_TOP
11 POP_BLOCK
12 JUMP_FORWARD 46 (to 61)
%-4d >> 15 DUP_TOP
16 LOAD_GLOBAL 0 (Exception)
19 COMPARE_OP 10 (exception match)
22 POP_JUMP_IF_FALSE 60
25 POP_TOP
26 STORE_FAST 0 (e)
29 POP_TOP
30 SETUP_FINALLY 14 (to 47)
%-4d 33 LOAD_FAST 0 (e)
36 LOAD_ATTR 1 (__traceback__)
39 STORE_FAST 1 (tb)
42 POP_BLOCK
43 POP_EXCEPT
44 LOAD_CONST 0 (None)
>> 47 LOAD_CONST 0 (None)
50 STORE_FAST 0 (e)
53 DELETE_FAST 0 (e)
56 END_FINALLY
57 JUMP_FORWARD 1 (to 61)
>> 60 END_FINALLY
%-4d >> 61 LOAD_FAST 1 (tb)
64 RETURN_VALUE
""" % (TRACEBACK_CODE.co_firstlineno + 1,
TRACEBACK_CODE.co_firstlineno + 2,
TRACEBACK_CODE.co_firstlineno + 3,
TRACEBACK_CODE.co_firstlineno + 4,
TRACEBACK_CODE.co_firstlineno + 5)
class DisTests(unittest.TestCase):
def get_disassembly(self, func, lasti=-1, wrapper=True):
# We want to test the default printing behaviour, not the file arg
output = io.StringIO()
with contextlib.redirect_stdout(output):
if wrapper:
dis.dis(func)
else:
dis.disassemble(func, lasti)
return output.getvalue()
def get_disassemble_as_string(self, func, lasti=-1):
return self.get_disassembly(func, lasti, False)
def strip_addresses(self, text):
return re.sub(r'\b0x[0-9A-Fa-f]+\b', '0x...', text)
def do_disassembly_test(self, func, expected):
got = self.get_disassembly(func)
if got != expected:
got = self.strip_addresses(got)
self.assertEqual(got, expected)
def test_opmap(self):
self.assertEqual(dis.opmap["NOP"], 9)
self.assertIn(dis.opmap["LOAD_CONST"], dis.hasconst)
self.assertIn(dis.opmap["STORE_NAME"], dis.hasname)
def test_opname(self):
self.assertEqual(dis.opname[dis.opmap["LOAD_FAST"]], "LOAD_FAST")
def test_boundaries(self):
self.assertEqual(dis.opmap["EXTENDED_ARG"], dis.EXTENDED_ARG)
self.assertEqual(dis.opmap["STORE_NAME"], dis.HAVE_ARGUMENT)
def test_dis(self):
self.do_disassembly_test(_f, dis_f)
def test_bug_708901(self):
self.do_disassembly_test(bug708901, dis_bug708901)
def test_bug_1333982(self):
# This one is checking bytecodes generated for an `assert` statement,
# so fails if the tests are run with -O. Skip this test then.
if not __debug__:
self.skipTest('need asserts, run without -O')
self.do_disassembly_test(bug1333982, dis_bug1333982)
def test_big_linenos(self):
def func(count):
namespace = {}
func = "def foo():\n " + "".join(["\n "] * count + ["spam\n"])
exec(func, namespace)
return namespace['foo']
# Test all small ranges
for i in range(1, 300):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
# Test some larger ranges too
for i in range(300, 5000, 10):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
from test import dis_module
self.do_disassembly_test(dis_module, dis_module_expected_results)
def test_disassemble_str(self):
self.do_disassembly_test(expr_str, dis_expr_str)
self.do_disassembly_test(simple_stmt_str, dis_simple_stmt_str)
self.do_disassembly_test(compound_stmt_str, dis_compound_stmt_str)
def test_disassemble_bytes(self):
self.do_disassembly_test(_f.__code__.co_code, dis_f_co_code)
def test_disassemble_method(self):
self.do_disassembly_test(_C(1).__init__, dis_c_instance_method)
def test_disassemble_method_bytes(self):
method_bytecode = _C(1).__init__.__code__.co_code
self.do_disassembly_test(method_bytecode, dis_c_instance_method_bytes)
def test_dis_none(self):
try:
del sys.last_traceback
except AttributeError:
pass
self.assertRaises(RuntimeError, dis.dis, None)
def test_dis_traceback(self):
try:
del sys.last_traceback
except AttributeError:
pass
try:
1/0
except Exception as e:
tb = e.__traceback__
sys.last_traceback = tb
tb_dis = self.get_disassemble_as_string(tb.tb_frame.f_code, tb.tb_lasti)
self.do_disassembly_test(None, tb_dis)
def test_dis_object(self):
self.assertRaises(TypeError, dis.dis, object())
class DisWithFileTests(DisTests):
# Run the tests again, using the file arg instead of print
def get_disassembly(self, func, lasti=-1, wrapper=True):
output = io.StringIO()
if wrapper:
dis.dis(func, file=output)
else:
dis.disassemble(func, lasti, file=output)
return output.getvalue()
code_info_code_info = """\
Name: code_info
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 3
Flags: OPTIMIZED, NEWLOCALS, NOFREE
Constants:
0: %r
Names:
0: _format_code_info
1: _get_code_object
Variable names:
0: x""" % (('Formatted details of methods, functions, or code.',)
if sys.flags.optimize < 2 else (None,))
@staticmethod
def tricky(x, y, z=True, *args, c, d, e=[], **kwds):
def f(c=c):
print(x, y, z, c, d, e, f)
yield x, y, z, c, d, e, f
code_info_tricky = """\
Name: tricky
Filename: (.*)
Argument count: 3
Kw-only arguments: 3
Number of locals: 8
Stack size: 7
Flags: OPTIMIZED, NEWLOCALS, VARARGS, VARKEYWORDS, GENERATOR
Constants:
0: None
1: <code object f at (.*), file "(.*)", line (.*)>
2: 'tricky.<locals>.f'
Variable names:
0: x
1: y
2: z
3: c
4: d
5: e
6: args
7: kwds
Cell variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
# NOTE: the order of the cell variables above depends on dictionary order!
co_tricky_nested_f = tricky.__func__.__code__.co_consts[1]
code_info_tricky_nested_f = """\
Name: f
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 8
Flags: OPTIMIZED, NEWLOCALS, NESTED
Constants:
0: None
Names:
0: print
Variable names:
0: c
Free variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
code_info_expr_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
Names:
0: x"""
code_info_simple_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
1: None
Names:
0: x"""
code_info_compound_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 0
1: 1
2: None
Names:
0: x"""
class CodeInfoTests(unittest.TestCase):
test_pairs = [
(dis.code_info, code_info_code_info),
(tricky, code_info_tricky),
(co_tricky_nested_f, code_info_tricky_nested_f),
(expr_str, code_info_expr_str),
(simple_stmt_str, code_info_simple_stmt_str),
(compound_stmt_str, code_info_compound_stmt_str),
]
def test_code_info(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
self.assertRegex(dis.code_info(x), expected)
def test_show_code(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
with captured_stdout() as output:
dis.show_code(x)
self.assertRegex(output.getvalue(), expected+"\n")
output = io.StringIO()
dis.show_code(x, file=output)
self.assertRegex(output.getvalue(), expected)
def test_code_info_object(self):
self.assertRaises(TypeError, dis.code_info, object())
def test_pretty_flags_no_flags(self):
self.assertEqual(dis.pretty_flags(0), '0x0')
# Fodder for instruction introspection tests
# Editing any of these may require recalculating the expected output
def outer(a=1, b=2):
def f(c=3, d=4):
def inner(e=5, f=6):
print(a, b, c, d, e, f)
print(a, b, c, d)
return inner
print(a, b, '', 1, [], {}, "Hello world!")
return f
def jumpy():
# This won't actually run (but that's OK, we only disassemble it)
for i in range(10):
print(i)
if i < 4:
continue
if i > 6:
break
else:
print("I can haz else clause?")
while i:
print(i)
i -= 1
if i > 6:
continue
if i < 4:
break
else:
print("Who let lolcatz into this test suite?")
try:
1 / 0
except ZeroDivisionError:
print("Here we go, here we go, here we go...")
else:
with i as dodgy:
print("Never reach this")
finally:
print("OK, now we're done")
# End fodder for opinfo generation tests
expected_outer_line = 1
_line_offset = outer.__code__.co_firstlineno - 1
code_object_f = outer.__code__.co_consts[3]
expected_f_line = code_object_f.co_firstlineno - _line_offset
code_object_inner = code_object_f.co_consts[3]
expected_inner_line = code_object_inner.co_firstlineno - _line_offset
expected_jumpy_line = 1
# The following lines are useful to regenerate the expected results after
# either the fodder is modified or the bytecode generation changes
# After regeneration, update the references to code_object_f and
# code_object_inner before rerunning the tests
#_instructions = dis.get_instructions(outer, first_line=expected_outer_line)
#print('expected_opinfo_outer = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer(), first_line=expected_outer_line)
#print('expected_opinfo_f = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer()(), first_line=expected_outer_line)
#print('expected_opinfo_inner = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
#print('expected_opinfo_jumpy = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
Instruction = dis.Instruction
expected_opinfo_outer = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=3, argrepr='3', offset=0, starts_line=2, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=2, argval=2, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_f, argrepr=repr(code_object_f), offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f', argrepr="'outer.<locals>.f'", offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=27, starts_line=7, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=33, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval=1, argrepr='1', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_LIST', opcode=103, arg=0, argval=0, argrepr='', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval='Hello world!', argrepr="'Hello world!'", offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=7, argval=7, argrepr='7 positional, 0 keyword pair', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=55, starts_line=8, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False),
]
expected_opinfo_f = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=5, argrepr='5', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=6, argrepr='6', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=2, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=3, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='c', argrepr='c', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='d', argrepr='d', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=4, argval=4, argrepr='', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_inner, argrepr=repr(code_object_inner), offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f.<locals>.inner', argrepr="'outer.<locals>.f.<locals>.inner'", offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=27, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=33, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='a', argrepr='a', offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='b', argrepr='b', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='c', argrepr='c', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='d', argrepr='d', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=4, argval=4, argrepr='4 positional, 0 keyword pair', offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='inner', argrepr='inner', offset=52, starts_line=6, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=55, starts_line=None, is_jump_target=False),
]
expected_opinfo_inner = [
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=0, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='c', argrepr='c', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='d', argrepr='d', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='e', argrepr='e', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=1, argval='f', argrepr='f', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=6, argval=6, argrepr='6 positional, 0 keyword pair', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
]
expected_opinfo_jumpy = [
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=77, argrepr='to 77', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='range', argrepr='range', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='FOR_ITER', opcode=93, arg=50, argval=66, argrepr='to 66', offset=13, starts_line=None, is_jump_target=True),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=16, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=19, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=22, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=29, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=32, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=35, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=47, argval=47, argrepr='', offset=38, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=41, starts_line=6, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=47, argrepr='to 47', offset=44, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=47, starts_line=7, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=50, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=53, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=13, argval=13, argrepr='', offset=56, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=59, starts_line=8, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=60, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=63, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=66, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=67, starts_line=10, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=70, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=73, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=76, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=154, argrepr='to 154', offset=77, starts_line=11, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=80, starts_line=None, is_jump_target=True),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=143, argval=143, argrepr='', offset=83, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=86, starts_line=12, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=89, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=92, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=95, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=96, starts_line=13, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=99, starts_line=None, is_jump_target=False),
Instruction(opname='INPLACE_SUBTRACT', opcode=56, arg=None, argval=None, argrepr='', offset=102, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=103, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=106, starts_line=14, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=109, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=112, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=124, argval=124, argrepr='', offset=115, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=118, starts_line=15, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=124, argrepr='to 124', offset=121, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=124, starts_line=16, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=127, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=130, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=80, argval=80, argrepr='', offset=133, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=136, starts_line=17, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=137, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=140, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=143, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=144, starts_line=19, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=147, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=150, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=153, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_FINALLY', opcode=122, arg=72, argval=229, argrepr='to 229', offset=154, starts_line=20, is_jump_target=True),
Instruction(opname='SETUP_EXCEPT', opcode=121, arg=12, argval=172, argrepr='to 172', offset=157, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=160, starts_line=21, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=163, starts_line=None, is_jump_target=False),
Instruction(opname='BINARY_TRUE_DIVIDE', opcode=27, arg=None, argval=None, argrepr='', offset=166, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=167, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=168, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=28, argval=200, argrepr='to 200', offset=169, starts_line=None, is_jump_target=False),
Instruction(opname='DUP_TOP', opcode=4, arg=None, argval=None, argrepr='', offset=172, starts_line=22, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=2, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=173, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=10, argval='exception match', argrepr='exception match', offset=176, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=199, argval=199, argrepr='', offset=179, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=182, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=183, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=184, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=185, starts_line=23, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=188, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=191, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=194, starts_line=None, is_jump_target=False),
Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=195, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=26, argval=225, argrepr='to 225', offset=196, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=199, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=200, starts_line=25, is_jump_target=True),
Instruction(opname='SETUP_WITH', opcode=143, arg=17, argval=223, argrepr='to 223', offset=203, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=206, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=209, starts_line=26, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Never reach this', argrepr="'Never reach this'", offset=212, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=215, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=218, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=219, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=220, starts_line=None, is_jump_target=False),
Instruction(opname='WITH_CLEANUP', opcode=81, arg=None, argval=None, argrepr='', offset=223, starts_line=None, is_jump_target=True),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=224, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=225, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=226, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=229, starts_line=28, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=232, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=235, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=238, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=239, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=240, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=243, starts_line=None, is_jump_target=False),
]
# One last piece of inspect fodder to check the default line number handling
def simple(): pass
expected_opinfo_simple = [
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=0, starts_line=simple.__code__.co_firstlineno, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=3, starts_line=None, is_jump_target=False)
]
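# A quick sketch of the behaviour exercised below (not part of the test
# fixtures): with no first_line argument, get_instructions reports source
# line numbers taken from the code object itself, e.g.
#     next(dis.get_instructions(simple)).starts_line \
#         == simple.__code__.co_firstlineno
# while an explicit first_line rebases every starts_line relative to it.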
class InstructionTests(BytecodeTestCase):
def test_default_first_line(self):
actual = dis.get_instructions(simple)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_first_line_set_to_None(self):
actual = dis.get_instructions(simple, first_line=None)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_outer(self):
actual = dis.get_instructions(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_nested(self):
with captured_stdout():
f = outer()
actual = dis.get_instructions(f, first_line=expected_f_line)
self.assertEqual(list(actual), expected_opinfo_f)
def test_doubly_nested(self):
with captured_stdout():
inner = outer()()
actual = dis.get_instructions(inner, first_line=expected_inner_line)
self.assertEqual(list(actual), expected_opinfo_inner)
def test_jumpy(self):
actual = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
self.assertEqual(list(actual), expected_opinfo_jumpy)
# get_instructions has its own tests above, so we can rely on it to
# validate the object-oriented API
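# A minimal sketch of the equivalence being relied on (asserted directly
# by test_iteration below):
#     list(dis.Bytecode(obj)) == list(dis.get_instructions(obj))
# so the Bytecode wrapper only needs light coverage of its own extras
# (info(), dis(), from_traceback(), current_offset).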
class BytecodeTests(unittest.TestCase):
def test_instantiation(self):
# Test with function, method, code string and code object
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
b = dis.Bytecode(obj)
self.assertIsInstance(b.codeobj, types.CodeType)
self.assertRaises(TypeError, dis.Bytecode, object())
def test_iteration(self):
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
via_object = list(dis.Bytecode(obj))
via_generator = list(dis.get_instructions(obj))
self.assertEqual(via_object, via_generator)
def test_explicit_first_line(self):
actual = dis.Bytecode(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_source_line_in_disassembly(self):
# Use the line in the source code
actual = dis.Bytecode(simple).dis()[:3]
expected = "{:>3}".format(simple.__code__.co_firstlineno)
self.assertEqual(actual, expected)
# Use an explicit first line number
actual = dis.Bytecode(simple, first_line=350).dis()[:3]
self.assertEqual(actual, "350")
def test_info(self):
self.maxDiff = 1000
for x, expected in CodeInfoTests.test_pairs:
b = dis.Bytecode(x)
self.assertRegex(b.info(), expected)
def test_disassembled(self):
actual = dis.Bytecode(_f).dis()
self.assertEqual(actual, dis_f)
def test_from_traceback(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
        while tb.tb_next:
            tb = tb.tb_next
self.assertEqual(b.current_offset, tb.tb_lasti)
def test_from_traceback_dis(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
self.assertEqual(b.dis(), dis_traceback)
if __name__ == "__main__":
unittest.main()
Name: tricky
Filename: (.*)
Argument count: 3
Kw-only arguments: 3
Number of locals: 8
Stack size: 7
Flags: OPTIMIZED, NEWLOCALS, VARARGS, VARKEYWORDS, GENERATOR
Constants:
0: None
1: <code object f at (.*), file "(.*)", line (.*)>
2: 'tricky.<locals>.f'
Variable names:
0: x
1: y
2: z
3: c
4: d
5: e
6: args
7: kwds
Cell variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
# NOTE: the order of the cell variables above depends on dictionary order!
co_tricky_nested_f = tricky.__func__.__code__.co_consts[1]
code_info_tricky_nested_f = """\
Name: f
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 8
Flags: OPTIMIZED, NEWLOCALS, NESTED
Constants:
0: None
Names:
0: print
Variable names:
0: c
Free variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
code_info_expr_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
Names:
0: x"""
code_info_simple_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
1: None
Names:
0: x"""
code_info_compound_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 0
1: 1
2: None
Names:
0: x"""
class CodeInfoTests(unittest.TestCase):
test_pairs = [
(dis.code_info, code_info_code_info),
(tricky, code_info_tricky),
(co_tricky_nested_f, code_info_tricky_nested_f),
(expr_str, code_info_expr_str),
(simple_stmt_str, code_info_simple_stmt_str),
(compound_stmt_str, code_info_compound_stmt_str),
]
def test_code_info(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
self.assertRegex(dis.code_info(x), expected)
def test_show_code(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
with captured_stdout() as output:
dis.show_code(x)
self.assertRegex(output.getvalue(), expected+"\n")
output = io.StringIO()
dis.show_code(x, file=output)
self.assertRegex(output.getvalue(), expected)
def test_code_info_object(self):
self.assertRaises(TypeError, dis.code_info, object())
def test_pretty_flags_no_flags(self):
self.assertEqual(dis.pretty_flags(0), '0x0')
# Fodder for instruction introspection tests
# Editing any of these may require recalculating the expected output
def outer(a=1, b=2):
def f(c=3, d=4):
def inner(e=5, f=6):
print(a, b, c, d, e, f)
print(a, b, c, d)
return inner
print(a, b, '', 1, [], {}, "Hello world!")
return f
def jumpy():
# This won't actually run (but that's OK, we only disassemble it)
for i in range(10):
print(i)
if i < 4:
continue
if i > 6:
break
else:
print("I can haz else clause?")
while i:
print(i)
i -= 1
if i > 6:
continue
if i < 4:
break
else:
print("Who let lolcatz into this test suite?")
try:
1 / 0
except ZeroDivisionError:
print("Here we go, here we go, here we go...")
else:
with i as dodgy:
print("Never reach this")
finally:
print("OK, now we're done")
# End fodder for opinfo generation tests
expected_outer_line = 1
_line_offset = outer.__code__.co_firstlineno - 1
code_object_f = outer.__code__.co_consts[3]
expected_f_line = code_object_f.co_firstlineno - _line_offset
code_object_inner = code_object_f.co_consts[3]
expected_inner_line = code_object_inner.co_firstlineno - _line_offset
expected_jumpy_line = 1
# The following lines are useful to regenerate the expected results after
# either the fodder is modified or the bytecode generation changes
# After regeneration, update the references to code_object_f and
# code_object_inner before rerunning the tests
#_instructions = dis.get_instructions(outer, first_line=expected_outer_line)
#print('expected_opinfo_outer = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer(), first_line=expected_outer_line)
#print('expected_opinfo_f = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer()(), first_line=expected_outer_line)
#print('expected_opinfo_inner = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
#print('expected_opinfo_jumpy = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
Instruction = dis.Instruction
expected_opinfo_outer = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=3, argrepr='3', offset=0, starts_line=2, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=2, argval=2, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_f, argrepr=repr(code_object_f), offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f', argrepr="'outer.<locals>.f'", offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=27, starts_line=7, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=33, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval=1, argrepr='1', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_LIST', opcode=103, arg=0, argval=0, argrepr='', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval='Hello world!', argrepr="'Hello world!'", offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=7, argval=7, argrepr='7 positional, 0 keyword pair', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=55, starts_line=8, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False),
]
expected_opinfo_f = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=5, argrepr='5', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=6, argrepr='6', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=2, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=3, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='c', argrepr='c', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='d', argrepr='d', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=4, argval=4, argrepr='', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_inner, argrepr=repr(code_object_inner), offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f.<locals>.inner', argrepr="'outer.<locals>.f.<locals>.inner'", offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=27, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=33, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='a', argrepr='a', offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='b', argrepr='b', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='c', argrepr='c', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='d', argrepr='d', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=4, argval=4, argrepr='4 positional, 0 keyword pair', offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='inner', argrepr='inner', offset=52, starts_line=6, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=55, starts_line=None, is_jump_target=False),
]
expected_opinfo_inner = [
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=0, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='c', argrepr='c', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='d', argrepr='d', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='e', argrepr='e', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=1, argval='f', argrepr='f', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=6, argval=6, argrepr='6 positional, 0 keyword pair', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
]
expected_opinfo_jumpy = [
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=77, argrepr='to 77', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='range', argrepr='range', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='FOR_ITER', opcode=93, arg=50, argval=66, argrepr='to 66', offset=13, starts_line=None, is_jump_target=True),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=16, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=19, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=22, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=29, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=32, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=35, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=47, argval=47, argrepr='', offset=38, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=41, starts_line=6, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=47, argrepr='to 47', offset=44, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=47, starts_line=7, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=50, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=53, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=13, argval=13, argrepr='', offset=56, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=59, starts_line=8, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=60, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=63, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=66, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=67, starts_line=10, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=70, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=73, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=76, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=154, argrepr='to 154', offset=77, starts_line=11, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=80, starts_line=None, is_jump_target=True),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=143, argval=143, argrepr='', offset=83, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=86, starts_line=12, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=89, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=92, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=95, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=96, starts_line=13, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=99, starts_line=None, is_jump_target=False),
Instruction(opname='INPLACE_SUBTRACT', opcode=56, arg=None, argval=None, argrepr='', offset=102, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=103, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=106, starts_line=14, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=109, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=112, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=124, argval=124, argrepr='', offset=115, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=118, starts_line=15, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=124, argrepr='to 124', offset=121, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=124, starts_line=16, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=127, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=130, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=80, argval=80, argrepr='', offset=133, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=136, starts_line=17, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=137, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=140, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=143, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=144, starts_line=19, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=147, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=150, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=153, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_FINALLY', opcode=122, arg=72, argval=229, argrepr='to 229', offset=154, starts_line=20, is_jump_target=True),
Instruction(opname='SETUP_EXCEPT', opcode=121, arg=12, argval=172, argrepr='to 172', offset=157, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=160, starts_line=21, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=163, starts_line=None, is_jump_target=False),
Instruction(opname='BINARY_TRUE_DIVIDE', opcode=27, arg=None, argval=None, argrepr='', offset=166, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=167, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=168, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=28, argval=200, argrepr='to 200', offset=169, starts_line=None, is_jump_target=False),
Instruction(opname='DUP_TOP', opcode=4, arg=None, argval=None, argrepr='', offset=172, starts_line=22, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=2, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=173, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=10, argval='exception match', argrepr='exception match', offset=176, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=199, argval=199, argrepr='', offset=179, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=182, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=183, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=184, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=185, starts_line=23, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=188, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=191, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=194, starts_line=None, is_jump_target=False),
Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=195, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=26, argval=225, argrepr='to 225', offset=196, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=199, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=200, starts_line=25, is_jump_target=True),
Instruction(opname='SETUP_WITH', opcode=143, arg=17, argval=223, argrepr='to 223', offset=203, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=206, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=209, starts_line=26, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Never reach this', argrepr="'Never reach this'", offset=212, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=215, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=218, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=219, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=220, starts_line=None, is_jump_target=False),
Instruction(opname='WITH_CLEANUP', opcode=81, arg=None, argval=None, argrepr='', offset=223, starts_line=None, is_jump_target=True),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=224, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=225, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=226, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=229, starts_line=28, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=232, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=235, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=238, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=239, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=240, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=243, starts_line=None, is_jump_target=False),
]
# One last piece of inspect fodder to check the default line number handling
def simple(): pass
expected_opinfo_simple = [
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=0, starts_line=simple.__code__.co_firstlineno, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=3, starts_line=None, is_jump_target=False)
]
class InstructionTests(BytecodeTestCase):
def test_default_first_line(self):
actual = dis.get_instructions(simple)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_first_line_set_to_None(self):
actual = dis.get_instructions(simple, first_line=None)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_outer(self):
actual = dis.get_instructions(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_nested(self):
with captured_stdout():
f = outer()
actual = dis.get_instructions(f, first_line=expected_f_line)
self.assertEqual(list(actual), expected_opinfo_f)
def test_doubly_nested(self):
with captured_stdout():
inner = outer()()
actual = dis.get_instructions(inner, first_line=expected_inner_line)
self.assertEqual(list(actual), expected_opinfo_inner)
def test_jumpy(self):
actual = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
self.assertEqual(list(actual), expected_opinfo_jumpy)
# get_instructions has its own tests above, so can rely on it to validate
# the object oriented API
class BytecodeTests(unittest.TestCase):
def test_instantiation(self):
# Test with function, method, code string and code object
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
b = dis.Bytecode(obj)
self.assertIsInstance(b.codeobj, types.CodeType)
self.assertRaises(TypeError, dis.Bytecode, object())
def test_iteration(self):
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
via_object = list(dis.Bytecode(obj))
via_generator = list(dis.get_instructions(obj))
self.assertEqual(via_object, via_generator)
def test_explicit_first_line(self):
actual = dis.Bytecode(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_source_line_in_disassembly(self):
# Use the line in the source code
actual = dis.Bytecode(simple).dis()[:3]
expected = "{:>3}".format(simple.__code__.co_firstlineno)
self.assertEqual(actual, expected)
# Use an explicit first line number
actual = dis.Bytecode(simple, first_line=350).dis()[:3]
self.assertEqual(actual, "350")
def test_info(self):
self.maxDiff = 1000
for x, expected in CodeInfoTests.test_pairs:
b = dis.Bytecode(x)
self.assertRegex(b.info(), expected)
def test_disassembled(self):
actual = dis.Bytecode(_f).dis()
self.assertEqual(actual, dis_f)
def test_from_traceback(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
while tb.tb_next: tb = tb.tb_next
self.assertEqual(b.current_offset, tb.tb_lasti)
def test_from_traceback_dis(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
self.assertEqual(b.dis(), dis_traceback)
if __name__ == "__main__":
unittest.main()
|
mit
| 9,050,017,357,533,923,000 | 48.695409 | 205 | 0.633255 | false | 3.174798 | true | false | false |
lsst-ts/ts_wep
|
tests/bsc/test_nbrStar.py
|
1
|
2710
|
# This file is part of ts_wep.
#
# Developed for the LSST Telescope and Site Systems.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import numpy as np
import unittest
from lsst.ts.wep.bsc.StarData import StarData
from lsst.ts.wep.bsc.NbrStar import NbrStar
from lsst.ts.wep.Utility import FilterType
class TestNbrStar(unittest.TestCase):
"""Test the NbrStar class."""
def setUp(self):
stars = StarData(
[123, 456, 789],
[0.1, 0.2, 0.3],
[2.1, 2.2, 2.3],
[2.0, 3.0, 4.0],
[2.1, 2.1, 4.1],
[2.2, 3.2, 4.2],
[2.3, 3.3, 4.3],
[2.4, 3.4, 4.4],
[2.5, 3.5, 4.5],
)
stars.setRaInPixel(stars.getRA() * 10)
stars.setDeclInPixel(stars.getDecl() * 10)
self.stars = stars
self.neighboringStar = NbrStar()
def testGetId(self):
self._addStar()
self.assertTrue(123 in self.neighboringStar.getId())
        self.assertEqual(self.neighboringStar.getId()[123], [456])
def _addStar(self):
self.neighboringStar.addStar(self.stars, 0, np.array([1]), FilterType.R)
def testGetRaDecl(self):
self._addStar()
self.assertEqual(
self.neighboringStar.getRaDecl(), {456: (0.2, 2.2), 123: (0.1, 2.1)}
)
def testGetRaDeclInPixel(self):
self._addStar()
self.assertEqual(
self.neighboringStar.getRaDeclInPixel(),
{456: (2.0, 22.0), 123: (1.0, 21.0)},
)
def testGetMag(self):
self._addStar()
self.assertEqual(len(self.neighboringStar.getMag(FilterType.R)), 2)
self.assertEqual(self.neighboringStar.getMag(FilterType.U), {})
def testAddStarAndGetData(self):
self._addStar()
self.assertNotEqual(len(self.neighboringStar.getId()), 0)
if __name__ == "__main__":
# Do the unit test
unittest.main()
|
gpl-3.0
| 5,397,523,749,416,159,000 | 28.139785 | 80 | 0.628782 | false | 3.288835 | true | false | false |
praekelt/vumi-go
|
go/apps/jsbox/metrics.py
|
1
|
3058
|
# -*- test-case-name: go.apps.jsbox.tests.test_metrics -*-
# -*- coding: utf-8 -*-
"""Metrics for JS Box sandboxes"""
import re
from vxsandbox import SandboxResource
from vumi.blinkenlights.metrics import SUM, AVG, MIN, MAX, LAST
class MetricEventError(Exception):
"""Raised when a command cannot be converted to a metric event."""
class MetricEvent(object):
AGGREGATORS = {
'sum': SUM,
'avg': AVG,
'min': MIN,
'max': MAX,
'last': LAST
}
NAME_REGEX = re.compile(r"^[a-zA-Z][a-zA-Z0-9._-]{,100}$")
def __init__(self, store, metric, value, agg):
self.store = store
self.metric = metric
self.value = value
self.agg = agg
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return all((self.store == other.store, self.metric == other.metric,
self.value == other.value, self.agg is other.agg))
@classmethod
def _parse_name(cls, name, kind):
if name is None:
raise MetricEventError("Missing %s name." % (kind,))
if not isinstance(name, basestring):
raise MetricEventError("Invalid type for %s name: %r"
% (kind, name))
if not cls.NAME_REGEX.match(name):
raise MetricEventError("Invalid %s name: %r." % (kind, name))
return name
@classmethod
def _parse_value(cls, value):
try:
value = float(value)
except (ValueError, TypeError):
raise MetricEventError("Invalid metric value %r." % (value,))
return value
@classmethod
def _parse_agg(cls, agg):
if not isinstance(agg, basestring):
raise MetricEventError("Invalid metric aggregator %r" % (agg,))
if agg not in cls.AGGREGATORS:
raise MetricEventError("Invalid metric aggregator %r." % (agg,))
return cls.AGGREGATORS[agg]
@classmethod
def from_command(cls, command):
store = cls._parse_name(command.get('store', 'default'), 'store')
metric = cls._parse_name(command.get('metric'), 'metric')
value = cls._parse_value(command.get('value'))
agg = cls._parse_agg(command.get('agg'))
return cls(store, metric, value, agg)
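# Illustrative sketch (not part of the original module): how a sandbox
# "fire" command dict maps onto a MetricEvent. The keys mirror the ones
# parsed by MetricEvent.from_command above; 'store' falls back to
# 'default' when omitted, and the aggregator name is looked up in
# MetricEvent.AGGREGATORS.
def _example_from_command():
    command = {'metric': 'messages_sent', 'value': 3, 'agg': 'sum'}
    ev = MetricEvent.from_command(command)
    assert ev.store == 'default'
    assert ev.value == 3.0 and ev.agg is SUM
    return ev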
class MetricsResource(SandboxResource):
"""Resource that provides metric storing."""
def _publish_event(self, api, ev):
conversation = self.app_worker.conversation_for_api(api)
self.app_worker.publish_account_metric(conversation.user_account.key,
ev.store, ev.metric, ev.value,
ev.agg)
def handle_fire(self, api, command):
"""Fire a metric value."""
try:
ev = MetricEvent.from_command(command)
except MetricEventError, e:
return self.reply(command, success=False, reason=unicode(e))
self._publish_event(api, ev)
return self.reply(command, success=True)
|
bsd-3-clause
| 1,665,057,652,388,163,000 | 31.88172 | 77 | 0.577502 | false | 3.905492 | false | false | false |
calexil/FightstickDisplay
|
pyglet/image/buffer.py
|
1
|
9407
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from pyglet.gl import *
def get_max_color_attachments():
"""Get the maximum allow Framebuffer Color attachements"""
number = GLint()
glGetIntegerv(GL_MAX_COLOR_ATTACHMENTS, number)
return number.value
class Renderbuffer:
"""OpenGL Renderbuffer Object"""
def __init__(self, width, height, internal_format, samples=1):
"""Create an instance of a Renderbuffer object."""
self._id = GLuint()
self._width = width
self._height = height
self._internal_format = internal_format
glGenRenderbuffers(1, self._id)
glBindRenderbuffer(GL_RENDERBUFFER, self._id)
if samples > 1:
glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples, internal_format, width, height)
else:
glRenderbufferStorage(GL_RENDERBUFFER, internal_format, width, height)
glBindRenderbuffer(GL_RENDERBUFFER, 0)
@property
def id(self):
return self._id.value
@property
def width(self):
return self._width
@property
def height(self):
return self._height
def bind(self):
glBindRenderbuffer(GL_RENDERBUFFER, self._id)
@staticmethod
def unbind():
glBindRenderbuffer(GL_RENDERBUFFER, 0)
def delete(self):
glDeleteRenderbuffers(1, self._id)
def __del__(self):
try:
glDeleteRenderbuffers(1, self._id)
# Python interpreter is shutting down:
except ImportError:
pass
def __repr__(self):
return "{}(id={})".format(self.__class__.__name__, self._id.value)
class Framebuffer:
"""OpenGL Framebuffer Object"""
def __init__(self, target=GL_FRAMEBUFFER):
"""Create an OpenGL Framebuffer object.
:rtype: :py:class:`~pyglet.image.Framebuffer`
.. versionadded:: 2.0
"""
self._id = GLuint()
glGenFramebuffers(1, self._id)
self._attachment_types = 0
self._width = 0
self._height = 0
self.target = target
@property
def id(self):
return self._id.value
@property
def width(self):
"""The width of the widest attachment."""
return self._width
@property
def height(self):
"""The width of the widest attachment."""
return self._height
def bind(self):
glBindFramebuffer(self.target, self._id)
def unbind(self):
glBindFramebuffer(self.target, 0)
def clear(self):
if self._attachment_types:
self.bind()
glClear(self._attachment_types)
self.unbind()
def delete(self):
glDeleteFramebuffers(1, self._id)
@property
def is_complete(self):
return glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE
@staticmethod
def get_status():
states = {GL_FRAMEBUFFER_UNSUPPORTED: "Framebuffer unsupported. Try another format.",
GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: "Framebuffer incomplete attachment.",
GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: "Framebuffer missing attachment.",
GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT: "Framebuffer unsupported dimension.",
GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT: "Framebuffer incomplete formats.",
GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER: "Framebuffer incomplete draw buffer.",
GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER: "Framebuffer incomplete read buffer.",
GL_FRAMEBUFFER_COMPLETE: "Framebuffer is complete."}
gl_status = glCheckFramebufferStatus(GL_FRAMEBUFFER)
return states.get(gl_status, "Unknown error")
def attach_texture(self, texture, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
"""Attach a Texture to the Framebuffer
:Parameters:
`texture` : pyglet.image.Texture
Specifies the texture object to attach to the framebuffer attachment
point named by attachment.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferTexture(target, attachment, texture.id, texture.level)
# glFramebufferTexture2D(target, attachment, texture.target, texture.id, texture.level)
self._attachment_types |= attachment
self._width = max(texture.width, self._width)
self._height = max(texture.height, self._height)
self.unbind()
def attach_texture_layer(self, texture, layer, level, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
"""Attach a Texture layer to the Framebuffer
:Parameters:
`texture` : pyglet.image.TextureArray
Specifies the texture object to attach to the framebuffer attachment
point named by attachment.
`layer` : int
Specifies the layer of texture to attach.
`level` : int
Specifies the mipmap level of texture to attach.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferTextureLayer(target, attachment, texture.id, level, layer)
self._attachment_types |= attachment
self._width = max(texture.width, self._width)
self._height = max(texture.height, self._height)
self.unbind()
def attach_renderbuffer(self, renderbuffer, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
""""Attach a Renderbuffer to the Framebuffer
:Parameters:
`renderbuffer` : pyglet.image.Renderbuffer
Specifies the Renderbuffer to attach to the framebuffer attachment
point named by attachment.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferRenderbuffer(target, attachment, GL_RENDERBUFFER, renderbuffer.id)
self._attachment_types |= attachment
self._width = max(renderbuffer.width, self._width)
self._height = max(renderbuffer.height, self._height)
self.unbind()
def __del__(self):
try:
glDeleteFramebuffers(1, self._id)
# Python interpreter is shutting down:
except ImportError:
pass
def __repr__(self):
return "{}(id={})".format(self.__class__.__name__, self._id.value)
|
gpl-3.0
| 933,900,144,839,290,800 | 37.395918 | 114 | 0.635059 | false | 4.412289 | false | false | false |
glogiotatidis/mopidy
|
mopidy/backend.py
|
1
|
12306
|
from __future__ import absolute_import, unicode_literals
from mopidy import listener, models
class Backend(object):
"""Backend API
If the backend has problems during initialization it should raise
:exc:`mopidy.exceptions.BackendError` with a descriptive error message.
This will make Mopidy print the error message and exit so that the user can
fix the issue.
:param config: the entire Mopidy configuration
:type config: dict
:param audio: actor proxy for the audio subsystem
:type audio: :class:`pykka.ActorProxy` for :class:`mopidy.audio.Audio`
"""
#: Actor proxy to an instance of :class:`mopidy.audio.Audio`.
#:
#: Should be passed to the backend constructor as the kwarg ``audio``,
#: which will then set this field.
audio = None
#: The library provider. An instance of
#: :class:`~mopidy.backend.LibraryProvider`, or :class:`None` if
#: the backend doesn't provide a library.
library = None
#: The playback provider. An instance of
#: :class:`~mopidy.backend.PlaybackProvider`, or :class:`None` if
#: the backend doesn't provide playback.
playback = None
#: The playlists provider. An instance of
    #: :class:`~mopidy.backend.PlaylistsProvider`, or :class:`None` if
#: the backend doesn't provide playlists.
playlists = None
#: List of URI schemes this backend can handle.
uri_schemes = []
    # Because the providers are marked as pykka_traversable, we can't get()
    # them from another actor, and need helper methods to check if the
    # providers are set or None.
def has_library(self):
return self.library is not None
def has_library_browse(self):
return self.has_library() and self.library.root_directory is not None
def has_playback(self):
return self.playback is not None
def has_playlists(self):
return self.playlists is not None
def ping(self):
"""Called to check if the actor is still alive."""
return True
class LibraryProvider(object):
"""
:param backend: backend the controller is a part of
:type backend: :class:`mopidy.backend.Backend`
"""
pykka_traversable = True
root_directory = None
"""
:class:`mopidy.models.Ref.directory` instance with a URI and name set
representing the root of this library's browse tree. URIs must
use one of the schemes supported by the backend, and name should
be set to a human friendly value.
*MUST be set by any class that implements* :meth:`LibraryProvider.browse`.
"""
def __init__(self, backend):
self.backend = backend
def browse(self, uri):
"""
See :meth:`mopidy.core.LibraryController.browse`.
If you implement this method, make sure to also set
:attr:`root_directory`.
*MAY be implemented by subclass.*
"""
return []
def get_distinct(self, field, query=None):
"""
See :meth:`mopidy.core.LibraryController.get_distinct`.
*MAY be implemented by subclass.*
Default implementation will simply return an empty set.
"""
return set()
def get_images(self, uris):
"""
See :meth:`mopidy.core.LibraryController.get_images`.
*MAY be implemented by subclass.*
        Default implementation will simply call lookup and try to use the
album art for any tracks returned. Most extensions should replace this
with something smarter or simply return an empty dictionary.
"""
result = {}
for uri in uris:
image_uris = set()
for track in self.lookup(uri):
if track.album and track.album.images:
image_uris.update(track.album.images)
result[uri] = [models.Image(uri=u) for u in image_uris]
return result
def lookup(self, uri):
"""
See :meth:`mopidy.core.LibraryController.lookup`.
*MUST be implemented by subclass.*
"""
raise NotImplementedError
def refresh(self, uri=None):
"""
See :meth:`mopidy.core.LibraryController.refresh`.
*MAY be implemented by subclass.*
"""
pass
def search(self, query=None, uris=None, exact=False):
"""
See :meth:`mopidy.core.LibraryController.search`.
*MAY be implemented by subclass.*
.. versionadded:: 1.0
The ``exact`` param which replaces the old ``find_exact``.
"""
pass
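# Illustrative sketch (not part of the original module): a minimal library
# provider for a hypothetical "example" URI scheme. As the browse() docstring
# above requires, root_directory is set alongside the browse implementation.
class _ExampleLibraryProvider(LibraryProvider):
    root_directory = models.Ref.directory(uri='example:root', name='Example')

    def browse(self, uri):
        if uri == self.root_directory.uri:
            return [models.Ref.track(uri='example:track:1', name='Demo')]
        return []

    def lookup(self, uri):
        return [models.Track(uri=uri, name='Demo')]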
class PlaybackProvider(object):
"""
:param audio: the audio actor
:type audio: actor proxy to an instance of :class:`mopidy.audio.Audio`
:param backend: the backend
:type backend: :class:`mopidy.backend.Backend`
"""
pykka_traversable = True
def __init__(self, audio, backend):
self.audio = audio
self.backend = backend
def pause(self):
"""
Pause playback.
*MAY be reimplemented by subclass.*
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.pause_playback().get()
def play(self):
"""
Start playback.
*MAY be reimplemented by subclass.*
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.start_playback().get()
def prepare_change(self):
"""
        Indicate that a URI change is about to happen.
*MAY be reimplemented by subclass.*
It is extremely unlikely it makes sense for any backends to override
this. For most practical purposes it should be considered an internal
call between backends and core that backend authors should not touch.
"""
self.audio.prepare_change().get()
def translate_uri(self, uri):
"""
Convert custom URI scheme to real playable URI.
*MAY be reimplemented by subclass.*
This is very likely the *only* thing you need to override as a backend
author. Typically this is where you convert any Mopidy specific URI
to a real URI and then return it. If you can't convert the URI just
return :class:`None`.
:param uri: the URI to translate
:type uri: string
:rtype: string or :class:`None` if the URI could not be translated
"""
return uri
def change_track(self, track):
"""
        Switch to the provided track.
*MAY be reimplemented by subclass.*
It is unlikely it makes sense for any backends to override
this. For most practical purposes it should be considered an internal
call between backends and core that backend authors should not touch.
The default implementation will call :meth:`translate_uri` which
is what you want to implement.
:param track: the track to play
:type track: :class:`mopidy.models.Track`
:rtype: :class:`True` if successful, else :class:`False`
"""
uri = self.translate_uri(track.uri)
if not uri:
return False
self.audio.set_uri(uri).get()
return True
def resume(self):
"""
Resume playback at the same time position playback was paused.
*MAY be reimplemented by subclass.*
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.start_playback().get()
def seek(self, time_position):
"""
Seek to a given time position.
*MAY be reimplemented by subclass.*
:param time_position: time position in milliseconds
:type time_position: int
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.set_position(time_position).get()
def stop(self):
"""
Stop playback.
*MAY be reimplemented by subclass.*
Should not be used for tracking if tracks have been played or when we
are done playing them.
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.stop_playback().get()
def get_time_position(self):
"""
Get the current time position in milliseconds.
*MAY be reimplemented by subclass.*
:rtype: int
"""
return self.audio.get_position().get()
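# Illustrative sketch (not part of the original module): as the translate_uri
# docstring above says, converting a custom scheme to a real playable URI is
# often the only override a backend author needs. The "example" scheme and
# file layout here are hypothetical.
class _ExamplePlaybackProvider(PlaybackProvider):
    def translate_uri(self, uri):
        if uri.startswith('example:'):
            track_id = uri.split(':', 1)[1]
            return 'file:///var/lib/example/%s.mp3' % track_id
        return None  # Not ours: core treats the track as unplayable.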
class PlaylistsProvider(object):
"""
A playlist provider exposes a collection of playlists, methods to
create/change/delete playlists in this collection, and lookup of any
playlist the backend knows about.
:param backend: backend the controller is a part of
:type backend: :class:`mopidy.backend.Backend` instance
"""
pykka_traversable = True
def __init__(self, backend):
self.backend = backend
def as_list(self):
"""
Get a list of the currently available playlists.
Returns a list of :class:`~mopidy.models.Ref` objects referring to the
playlists. In other words, no information about the playlists' content
is given.
:rtype: list of :class:`mopidy.models.Ref`
.. versionadded:: 1.0
"""
raise NotImplementedError
def get_items(self, uri):
"""
Get the items in a playlist specified by ``uri``.
Returns a list of :class:`~mopidy.models.Ref` objects referring to the
playlist's items.
If a playlist with the given ``uri`` doesn't exist, it returns
:class:`None`.
:rtype: list of :class:`mopidy.models.Ref`, or :class:`None`
.. versionadded:: 1.0
"""
raise NotImplementedError
def create(self, name):
"""
Create a new empty playlist with the given name.
        Returns a new playlist with the given name and a URI.
*MUST be implemented by subclass.*
:param name: name of the new playlist
:type name: string
:rtype: :class:`mopidy.models.Playlist`
"""
raise NotImplementedError
def delete(self, uri):
"""
Delete playlist identified by the URI.
*MUST be implemented by subclass.*
:param uri: URI of the playlist to delete
:type uri: string
"""
raise NotImplementedError
def lookup(self, uri):
"""
Lookup playlist with given URI in both the set of playlists and in any
other playlist source.
        Returns the playlist or :class:`None` if not found.
*MUST be implemented by subclass.*
:param uri: playlist URI
:type uri: string
:rtype: :class:`mopidy.models.Playlist` or :class:`None`
"""
raise NotImplementedError
def refresh(self):
"""
Refresh the playlists in :attr:`playlists`.
*MUST be implemented by subclass.*
"""
raise NotImplementedError
def save(self, playlist):
"""
Save the given playlist.
The playlist must have an ``uri`` attribute set. To create a new
        playlist with a URI, use :meth:`create`.
Returns the saved playlist or :class:`None` on failure.
*MUST be implemented by subclass.*
:param playlist: the playlist to save
:type playlist: :class:`mopidy.models.Playlist`
:rtype: :class:`mopidy.models.Playlist` or :class:`None`
"""
raise NotImplementedError
class BackendListener(listener.Listener):
"""
Marker interface for recipients of events sent by the backend actors.
Any Pykka actor that mixes in this class will receive calls to the methods
defined here when the corresponding events happen in a backend actor. This
interface is used both for looking up what actors to notify of the events,
and for providing default implementations for those listeners that are not
interested in all events.
Normally, only the Core actor should mix in this class.
"""
@staticmethod
def send(event, **kwargs):
"""Helper to allow calling of backend listener events"""
listener.send_async(BackendListener, event, **kwargs)
def playlists_loaded(self):
"""
Called when playlists are loaded or refreshed.
*MAY* be implemented by actor.
"""
pass
|
apache-2.0
| -5,506,081,038,595,665,000 | 27.887324 | 79 | 0.622461 | false | 4.328526 | false | false | false |
CMPUT410W15/cmput410-project
|
socialdistribution/urls.py
|
1
|
1265
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from login.views import *
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf import settings
from django.conf.urls.static import static
from posts.views import *
from images.views import *
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'socialdistribution.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^friends/', include('author.urls')),
url(r'^api/', include('api.urls')),
url(r'^post/', include('posts.urls')),
url(r'^images/', include('images.urls')),
url(r'^$', home),
url(r'^logout/$', logout_page),
    url(r'^accounts/login/$', 'django.contrib.auth.views.login'),  # If the user is not logged in, redirect to the login page
url(r'^register/$', register),
url(r'^register/success/$', register_success),
url(r'^home/$', home),
url(r'^home/([^/]+)/$', authorhome),
url(r'^home/author/posts/$',personal_stream),
url(r'^home/author/posts/friends/$',personal_stream_friends),
url(r'^post/(?P<post_id>[\w-]+)/$', comment, name="add_comment"),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
apache-2.0
| 8,455,670,157,464,625,000 | 36.205882 | 119 | 0.670356 | false | 3.553371 | false | true | false |
hb9kns/PyBitmessage
|
src/bitmessageqt/dialogs.py
|
1
|
2513
|
from PyQt4 import QtGui
from tr import _translate
from retranslateui import RetranslateMixin
import widgets
from newchandialog import NewChanDialog
from address_dialogs import (
AddAddressDialog, NewAddressDialog, NewSubscriptionDialog,
RegenerateAddressesDialog, SpecialAddressBehaviorDialog, EmailGatewayDialog
)
import paths
from version import softwareVersion
__all__ = [
"NewChanDialog", "AddAddressDialog", "NewAddressDialog",
"NewSubscriptionDialog", "RegenerateAddressesDialog",
"SpecialAddressBehaviorDialog", "EmailGatewayDialog"
]
class AboutDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
widgets.load('about.ui', self)
last_commit = paths.lastCommit()
version = softwareVersion
commit = last_commit.get('commit')
if commit:
version += '-' + commit[:7]
self.labelVersion.setText(
self.labelVersion.text().replace(
':version:', version
).replace(':branch:', commit or 'v%s' % version)
)
self.labelVersion.setOpenExternalLinks(True)
try:
self.label_2.setText(
self.label_2.text().replace(
'2017', str(last_commit.get('time').year)
))
except AttributeError:
pass
self.setFixedSize(QtGui.QWidget.sizeHint(self))
class IconGlossaryDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None, config=None):
super(IconGlossaryDialog, self).__init__(parent)
widgets.load('iconglossary.ui', self)
# FIXME: check the window title visibility here
self.groupBox.setTitle('')
self.labelPortNumber.setText(_translate(
"iconGlossaryDialog",
"You are using TCP port %1. (This can be changed in the settings)."
).arg(config.getint('bitmessagesettings', 'port')))
self.setFixedSize(QtGui.QWidget.sizeHint(self))
class HelpDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None):
super(HelpDialog, self).__init__(parent)
widgets.load('help.ui', self)
self.setFixedSize(QtGui.QWidget.sizeHint(self))
class ConnectDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None):
super(ConnectDialog, self).__init__(parent)
widgets.load('connect.ui', self)
self.setFixedSize(QtGui.QWidget.sizeHint(self))
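if __name__ == '__main__':
    # Sketch only: shows how one of the dialogs above is constructed. This
    # assumes the bundled .ui files can be found by widgets.load(), which in
    # practice means running from within the PyBitmessage source tree.
    import sys
    app = QtGui.QApplication(sys.argv)
    dialog = HelpDialog()
    dialog.show()
    sys.exit(app.exec_())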
|
mit
| 1,662,048,093,800,792,800 | 32.065789 | 79 | 0.652209 | false | 4.086179 | false | false | false |
phiros/nepi
|
src/nepi/resources/ns3/classes/single_model_spectrum_channel.py
|
1
|
3165
|
#
# NEPI, a framework to manage network experiments
# Copyright (C) 2014 INRIA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from nepi.execution.attribute import Attribute, Flags, Types
from nepi.execution.trace import Trace, TraceAttr
from nepi.execution.resource import ResourceManager, clsinit_copy, \
ResourceState
from nepi.resources.ns3.ns3channel import NS3BaseChannel
@clsinit_copy
class NS3SingleModelSpectrumChannel(NS3BaseChannel):
_rtype = "ns3::SingleModelSpectrumChannel"
@classmethod
def _register_attributes(cls):
attr_maxlossdb = Attribute("MaxLossDb",
"If a single-frequency PropagationLossModel is used, this value represents the maximum loss in dB for which transmissions will be passed to the receiving PHY. Signals for which the PropagationLossModel returns a loss bigger than this value will not be propagated to the receiver. This parameter is to be used to reduce the computational load by not propagating signals that are far beyond the interference range. Note that the default value corresponds to considering all signals for reception. Tune this value with care. ",
type = Types.Double,
default = "1e+09",
allowed = None,
range = None,
flags = Flags.Reserved | Flags.Construct)
cls._register_attribute(attr_maxlossdb)
attr_id = Attribute("Id",
"The id (unique integer) of this Channel.",
type = Types.Integer,
default = "0",
allowed = None,
range = None,
flags = Flags.Reserved | Flags.NoWrite)
cls._register_attribute(attr_id)
@classmethod
def _register_traces(cls):
pathloss = Trace("PathLoss", "This trace is fired whenever a new path loss value is calculated. The first and second parameters to the trace are pointers respectively to the TX and RX SpectrumPhy instances, whereas the third parameters is the loss value in dB. Note that the loss value reported by this trace is the single-frequency loss value obtained by evaluating only the TX and RX AntennaModels and the PropagationLossModel. In particular, note that SpectrumPropagationLossModel (even if present) is never used to evaluate the loss value reported in this trace. ")
cls._register_trace(pathloss)
def __init__(self, ec, guid):
super(NS3SingleModelSpectrumChannel, self).__init__(ec, guid)
self._home = "ns3-single-model-spectrum-channel-%s" % self.guid
|
gpl-3.0
| 3,667,919,845,605,207,000 | 47.692308 | 577 | 0.706793 | false | 4.294437 | false | false | false |
rndusr/stig
|
stig/commands/cli/misc.py
|
1
|
1667
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# http://www.gnu.org/licenses/gpl-3.0.txt
"""Documentation commands"""
from .. import CmdError
from ... import objects
from ..base import misc as base
from ...logging import make_logger # isort:skip
log = make_logger(__name__)
class HelpCmd(base.HelpCmdbase):
provides = {'cli'}
async def run(self, TOPIC):
# If TOPIC is a setting and it is managed by the server, we must fetch
# config values from the server so we can display its current value.
for topic in TOPIC:
if topic.startswith('srv.'):
try:
await objects.srvapi.settings.update()
except objects.srvapi.ClientError as e:
self.error(e)
finally:
break
return super().run(TOPIC)
def display_help(self, topics, lines):
for line in lines:
print(line)
class VersionCmd(base.VersionCmdbase):
provides = {'cli'}
class LogCmd(base.LogCmdbase):
provides = {'cli'}
def _do(self, action, *args):
cmd_str = '%s %s' % (action, ' '.join(args))
raise CmdError('Unsupported command in CLI mode: %s' % cmd_str)
|
gpl-3.0
| 2,688,278,037,507,489,000 | 31.057692 | 78 | 0.644271 | false | 4.046117 | false | false | false |
route-nazionale/event_manager
|
ragazzi/views.py
|
1
|
1103
|
from django.shortcuts import render
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from base.views_support import HttpJSONResponse
from base.models import Rover, Event
from base.models.event import EventTurno1, EventTurno2, EventTurno3
import json
@csrf_exempt
@require_POST
def boy_evaluate(request, pk):
if not request.user.is_staff or request.session.get('valid'):
raise PermissionDenied()
data = json.loads(request.body)
rover = get_object_or_404(Rover, pk=pk)
    # Step 1: simulate the assignment of the new labs
rover.turno1 = EventTurno1.objects.get(code=data['turno1'])
rover.turno2 = EventTurno2.objects.get(code=data['turno2'])
rover.turno3 = EventTurno3.objects.get(code=data['turno3'])
# Step 2: check constraints
msgs_constraints = rover.check_constraints()
msgs_constraints['satisfaction'] = rover.calculate_satisfaction()
return HttpJSONResponse(msgs_constraints)
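# A sketch of the JSON body boy_evaluate() expects; the turno codes below
# are hypothetical examples -- real values come from the EventTurno1/2/3
# tables.
EXAMPLE_EVALUATE_PAYLOAD = json.dumps({
    "turno1": "T1-001",
    "turno2": "T2-001",
    "turno3": "T3-001",
})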
|
agpl-3.0
| 2,945,973,249,241,085,400 | 28.026316 | 69 | 0.752493 | false | 3.535256 | false | false | false |
watchdogpolska/watchdog-kj-kultura
|
watchdog_kj_kultura/organizations_requests/tests/test_forms.py
|
1
|
3033
|
from django.core import mail
from django.test import RequestFactory, TestCase
from ...organizations.factories import OrganizationFactory
from ..factories import TemplateFactory
from ..forms import RequestForm
class RequestFormTestCase(TestCase):
def setUp(self):
self.organization = OrganizationFactory()
self.template = TemplateFactory()
self.factory = RequestFactory()
self.request = self.factory.get('/customer/details')
def test_form_is_valid(self):
body = "Lorem_FOO_BAR_Ipsum"
form = RequestForm(data={'email': '[email protected]',
'body': body,
'email_user': '[email protected]'},
organization=self.organization,
template=self.template,
request=self.request)
self.assertTrue(form.is_valid(), msg=form.errors)
def test_send_email_to_organization(self):
body = "Lorem_CONTENT_Ipsum"
form = RequestForm(data={'email': '[email protected]',
'body': body,
'email_user': '[email protected]'},
organization=self.organization,
template=self.template,
request=self.request)
self.assertTrue(form.is_valid(), msg=form.errors)
form.save()
self.assertEqual(len(mail.outbox), 2)
self.assertIn(self.organization.email, mail.outbox[0].to)
self.assertEqual(mail.outbox[0].subject, self.template.subject)
self.assertIn(body, mail.outbox[0].body)
def test_send_notification_to_user(self):
body = "Lorem_CONTENT_Ipsum"
form = RequestForm(data={'email': '[email protected]',
'body': body,
'email_user': '[email protected]'},
organization=self.organization,
template=self.template,
request=self.request)
self.assertTrue(form.is_valid(), msg=form.errors)
form.save()
self.assertEqual(len(mail.outbox), 2)
self.assertIn('[email protected]', mail.outbox[1].to)
self.assertEqual(mail.outbox[1].subject, self.template.subject)
self.assertIn(body, mail.outbox[1].body)
def test_require_email_in_body(self):
kwargs = dict(data={'email': '[email protected]',
'body': 'jacob',
'email_user': '[email protected]'},
organization=self.organization,
template=TemplateFactory(email_required=True),
request=self.request)
form = RequestForm(**kwargs)
self.assertFalse(form.is_valid())
kwargs['data']['body'] = kwargs['data']['email_user']
form = RequestForm(**kwargs)
self.assertTrue(form.is_valid())
|
mit
| 387,663,058,317,457,540 | 42.328571 | 71 | 0.554566 | false | 4.395652 | true | false | false |
e-koch/VLA_Lband
|
14B-088/HI/imaging/sd_regridding/sd_comparison.py
|
1
|
3520
|
'''
Compare the regridded versions of the SD datasets.
'''
from spectral_cube import SpectralCube
import matplotlib.pyplot as plt
import os
from corner import hist2d
from radio_beam import Beam
import astropy.units as u
import numpy as np
from paths import fourteenB_HI_data_path, data_path
from galaxy_params import gal
# Load in the 4 cubes and run.
vla_cube = SpectralCube.read(fourteenB_HI_data_path("M33_14B-088_HI.clean.image.fits"))
arecibo_path = os.path.join(data_path, "Arecibo")
# Spectral interpolation, followed by reprojection.
arecibo_name = \
os.path.join(arecibo_path,
"14B-088_items_new/m33_arecibo_14B088.fits")
arecibo_cube = SpectralCube.read(arecibo_name)
ebhis_path = os.path.join(data_path, "EBHIS")
# Spectral interpolation, followed by reprojection.
ebhis_name = os.path.join(ebhis_path, "14B-088_items/m33_ebhis_14B088.fits")
ebhis_cube = SpectralCube.read(ebhis_name)
gbt_path = os.path.join(data_path, "GBT")
gbt_name = os.path.join(gbt_path, "14B-088_items/m33_gbt_vlsr_highres_Tmb_14B088.fits")
gbt_cube = SpectralCube.read(gbt_name)
gbt_lowres_name = os.path.join(gbt_path, "14B-088_items/m33_gbt_vlsr_Tmb_14B088.fits")
gbt_lowres_cube = SpectralCube.read(gbt_lowres_name)
# Compare total emission in the cubes.
vla_mask = np.isfinite(vla_cube[0])
arecibo_sum = arecibo_cube.with_mask(vla_mask).sum()
ebhis_sum = ebhis_cube.with_mask(vla_mask).sum()
gbt_sum = gbt_cube.with_mask(vla_mask).sum()
gbt_lowres_sum = gbt_lowres_cube.with_mask(vla_mask).sum()
# plt.plot() needs a sequence rather than four scalar arguments.
plt.plot([arecibo_sum, ebhis_sum, gbt_sum, gbt_lowres_sum], 'o')
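# Sketch: express each single-dish total as a fraction of the GBT total,
# assuming all cubes share the same brightness units after regridding.
for name, total in [("Arecibo", arecibo_sum), ("EBHIS", ebhis_sum),
                    ("GBT low-res", gbt_lowres_sum)]:
    print("{0}: {1:.2f} x GBT".format(name, float(total / gbt_sum)))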
# Compare intensities in one plane
# arecibo_plane = arecibo_cube[500]
# ebhis_plane = ebhis_cube[500]
# gbt_plane = gbt_cube[500]
# gbt_plane[np.isnan(gbt_plane)] = 0.0 * u.K
# gbt_lowres_plane = gbt_lowres_cube[500]
# # Convolve GBT to match EBHIS
# beam_fwhm = lambda diam: ((1.2 * 21 * u.cm) / diam.to(u.cm)) * u.rad
# gbt_90m_beam = Beam(beam_fwhm(90 * u.m))
# gbt_plane._beam = gbt_90m_beam
# gbt_plane_convolved = gbt_plane.convolve_to(ebhis_plane.beam)
# gbt_100m_beam = Beam(beam_fwhm(100 * u.m))
# gbt_plane._beam = gbt_100m_beam
# gbt_plane_convolved_100 = gbt_plane.convolve_to(ebhis_plane.beam)
# ax = plt.subplot(131)
# hist2d(gbt_plane.value.ravel(), ebhis_plane.value.ravel(), ax=ax)
# plt.plot([0, 15], [0, 15])
# ax2 = plt.subplot(132)
# hist2d(gbt_plane_convolved.value.ravel(), ebhis_plane.value.ravel(), ax=ax2)
# plt.plot([0, 15], [0, 15])
# ax3 = plt.subplot(133)
# hist2d(gbt_plane_convolved_100.value.ravel(), ebhis_plane.value.ravel(), ax=ax3)
# plt.plot([0, 15], [0, 15])
# Best match for GBT is with a 106 m beam, convolved to the 80 m of EBHIS.
# Well, something is wrong here. It has to be that the difference between the
# data is a 80 m deconvolved w/ a 106 m beam. The EBHIS beam size should then
# be slightly smaller?
# Now convolve the Arecibo down to the GBT.
# gbt_90m_beam = Beam(beam_fwhm(90 * u.m))
# arecibo_plane_convolved = arecibo_plane.convolve_to(gbt_90m_beam)
# gbt_100m_beam = Beam(beam_fwhm(100 * u.m))
# arecibo_plane_convolved_100 = arecibo_plane.convolve_to(gbt_100m_beam)
# ax = plt.subplot(131)
# hist2d(arecibo_plane.value.ravel(), gbt_plane.value.ravel(), ax=ax)
# plt.plot([0, 15], [0, 15])
# ax2 = plt.subplot(132)
# hist2d(arecibo_plane_convolved.value.ravel(), gbt_plane.value.ravel(), ax=ax2)
# plt.plot([0, 15], [0, 15])
# ax3 = plt.subplot(133)
# hist2d(arecibo_plane_convolved_100.value.ravel(), gbt_plane.value.ravel(), ax=ax3)
# plt.plot([0, 15], [0, 15])
|
mit
| -5,906,216,601,364,028,000 | 31.009091 | 87 | 0.704545 | false | 2.367182 | false | false | false |
Kovak/KivyNBT
|
flat_kivy/uix/behaviors.py
|
1
|
19077
|
from weakref import ref
from kivy.app import App
from kivy.clock import Clock
from kivy.properties import (ObjectProperty, OptionProperty, NumericProperty,
ListProperty, StringProperty)
from kivy.metrics import sp
from kivy.animation import Animation
from kivy.graphics import (Color, Ellipse, Rectangle, StencilPush,
                           StencilPop, StencilUse, StencilUnUse)
# Needed for the isinstance() checks in the logging code below.
from kivy.uix.checkbox import CheckBox
try:
from kivy.graphics import (ScissorPush, ScissorPop)
except ImportError:
_has_scissor_instr = False
else:
_has_scissor_instr = True
from flat_kivy.utils import construct_data_resource
from flat_kivy.logmanager import LogManager
class ThemeBehavior(object):
theme = ListProperty([])
def on_theme(self, instance, value):
if value != []:
app = App.get_running_app()
theme = app.theme_manager.get_theme(value[0], value[1])
types = app.theme_manager.get_theme_types()
for each in types:
if isinstance(self, types[each]):
try:
theme_def = theme[each]
                    except KeyError:
print(each, 'not in theme', value[0], value[1], self)
continue
for propname in theme_def:
setattr(self, propname, theme_def[propname])
class GrabBehavior(object):
last_touch = ObjectProperty(None)
def on_touch_down(self, touch):
if touch.is_mouse_scrolling:
return False
if self.disabled:
return False
if not self.collide_point(touch.x, touch.y):
return False
if self in touch.ud:
return False
touch.grab(self)
touch.ud[self] = True
self.last_touch = touch
return super(GrabBehavior, self).on_touch_down(touch)
def on_touch_move(self, touch):
if super(GrabBehavior, self).on_touch_move(touch):
return True
if touch.grab_current is self:
return True
return self in touch.ud
def on_touch_up(self, touch):
if touch.grab_current is self:
result = super(GrabBehavior, self).on_touch_up(touch)
touch.ungrab(self)
self.last_touch = touch
return result
class LogBehavior(object):
log_manager = LogManager(
construct_data_resource('logs/'))
def on_touch_down(self, touch):
log_manager = self.log_manager
if self in touch.ud and log_manager.do_logging:
            print(self, 'in on_touch_down')
coords = (touch.x, touch.y)
log_interface = log_manager.log_interface
touch_id = log_manager.touch_id
touch.ud['log_id'] = touch_id
log_interface.set_entry(
'touches', touch_id, 'touch_down_at', coords,
do_timestamp=True)
log_manager.touch_id += 1
log_interface.set_entry(
'touches', 'last_touch_id', 'value', touch_id)
return super(LogBehavior, self).on_touch_down(touch)
def on_touch_move(self, touch):
log_manager = self.log_manager
if self in touch.ud and log_manager.do_logging:
coords = (touch.x, touch.y)
touch_id = touch.ud['log_id']
log_manager.log_interface.append_entry('touches', touch_id,
'touch_moves_at', coords, do_timestamp=True)
return super(LogBehavior, self).on_touch_move(touch)
def on_touch_up(self, touch):
log_manager = self.log_manager
if self in touch.ud and log_manager.do_logging:
coords = (touch.x, touch.y)
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id, 'touch_up_at', coords, do_timestamp=True)
return super(LogBehavior, self).on_touch_up(touch)
class LogNoTouchBehavior(object):
log_manager = LogManager(
construct_data_resource('logs/'))
class ButtonBehavior(object):
'''Button behavior.
:Events:
`on_press`
Fired when the button is pressed.
`on_release`
Fired when the button is released (i.e. the touch/click that
pressed the button goes away).
'''
state = OptionProperty('normal', options=('normal', 'down'))
'''State of the button, must be one of 'normal' or 'down'.
The state is 'down' only when the button is currently touched/clicked,
otherwise 'normal'.
:attr:`state` is an :class:`~kivy.properties.OptionProperty`.
'''
def __init__(self, **kwargs):
self.register_event_type('on_press')
self.register_event_type('on_release')
super(ButtonBehavior, self).__init__(**kwargs)
def _do_press(self):
self.state = 'down'
def _do_release(self):
self.state = 'normal'
def on_touch_down(self, touch):
if self in touch.ud:
if isinstance(self, LogBehavior):
log_manager = self.log_manager
if log_manager.do_logging:
if isinstance(self, CheckBox):
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id,
'checkbox_pressed_down', self.state,
do_timestamp=True)
else:
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id,
'button_pressed', self.text, do_timestamp=True)
self._do_press()
self.dispatch('on_press')
return super(ButtonBehavior, self).on_touch_down(touch)
def on_touch_move(self, touch):
return super(ButtonBehavior, self).on_touch_move(touch)
def on_touch_up(self, touch):
if self in touch.ud:
if isinstance(self, LogBehavior):
log_manager = self.log_manager
if log_manager.do_logging:
if isinstance(self, CheckBox):
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id,
'checkbox_released', self.state,
do_timestamp=True)
else:
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id, 'button_released',
self.text, do_timestamp=True)
self._do_release()
self.dispatch('on_release')
return super(ButtonBehavior, self).on_touch_up(touch)
def on_press(self):
pass
def on_release(self):
pass
def trigger_action(self, duration=0.1):
'''Trigger whatever action(s) have been bound to the button by calling
both the on_press and on_release callbacks.
This simulates a quick button press without using any touch events.
Duration is the length of the press in seconds. Pass 0 if you want
the action to happen instantly.
.. versionadded:: 1.8.0
'''
self._do_press()
self.dispatch('on_press')
def trigger_release(dt):
self._do_release()
self.dispatch('on_release')
if not duration:
trigger_release(0)
else:
Clock.schedule_once(trigger_release, duration)
class ToggleButtonBehavior(ButtonBehavior):
'''ToggleButton behavior, see ToggleButton module documentation for more
information.
.. versionadded:: 1.8.0
'''
__groups = {}
group = ObjectProperty(None, allownone=True)
'''Group of the button. If None, no group will be used (button is
independent). If specified, :attr:`group` must be a hashable object, like
a string. Only one button in a group can be in 'down' state.
:attr:`group` is a :class:`~kivy.properties.ObjectProperty`
'''
def __init__(self, **kwargs):
self._previous_group = None
super(ToggleButtonBehavior, self).__init__(**kwargs)
def on_group(self, *largs):
groups = ToggleButtonBehavior.__groups
if self._previous_group:
group = groups[self._previous_group]
for item in group[:]:
if item() is self:
group.remove(item)
break
group = self._previous_group = self.group
if group not in groups:
groups[group] = []
r = ref(self, ToggleButtonBehavior._clear_groups)
groups[group].append(r)
def _release_group(self, current):
if self.group is None:
return
group = self.__groups[self.group]
for item in group[:]:
widget = item()
if widget is None:
group.remove(item)
if widget is current:
continue
widget.state = 'normal'
def _do_press(self):
self._release_group(self)
self.state = 'normal' if self.state == 'down' else 'down'
def _do_release(self):
pass
@staticmethod
def _clear_groups(wk):
# auto flush the element when the weak reference have been deleted
groups = ToggleButtonBehavior.__groups
for group in list(groups.values()):
if wk in group:
group.remove(wk)
break
@staticmethod
def get_widgets(groupname):
'''Return the widgets contained in a specific group. If the group
doesn't exist, an empty list will be returned.
.. important::
Always release the result of this method! In doubt, do::
l = ToggleButtonBehavior.get_widgets('mygroup')
# do your job
del l
.. warning::
It's possible that some widgets that you have previously
deleted are still in the list. Garbage collector might need
more elements before flushing it. The return of this method
is informative, you've been warned!
'''
groups = ToggleButtonBehavior.__groups
if groupname not in groups:
return []
return [x() for x in groups[groupname] if x()][:]
class TouchRippleBehavior(object):
ripple_rad = NumericProperty(10)
ripple_pos = ListProperty([0, 0])
ripple_color = ListProperty((0., 0., 0., 1.))
ripple_duration_in = NumericProperty(.7)
ripple_duration_out = NumericProperty(.3)
fade_to_alpha = NumericProperty(.12)
ripple_scale = NumericProperty(4.0)
ripple_func_in = StringProperty('in_cubic')
ripple_func_out = StringProperty('out_quad')
def on_touch_down(self, touch):
if self in touch.ud:
self.anim_complete(self, self)
self.ripple_pos = ripple_pos = (touch.x, touch.y)
Animation.cancel_all(self, 'ripple_rad', 'ripple_color')
rc = self.ripple_color
ripple_rad = self.ripple_rad
self.ripple_color = [rc[0], rc[1], rc[2], .16]
anim = Animation(
ripple_rad=max(self.width, self.height) * self.ripple_scale,
t=self.ripple_func_in,
ripple_color=[rc[0], rc[1], rc[2], self.fade_to_alpha],
duration=self.ripple_duration_in)
anim.start(self)
with self.canvas.after:
x,y = self.to_window(*self.pos)
width, height = self.size
#In python 3 the int cast will be unnecessary
pos = (int(round(x)), int(round(y)))
size = (int(round(width)), int(round(height)))
if _has_scissor_instr:
ScissorPush(x=pos[0], y=pos[1],
width=size[0], height=size[1])
else:
StencilPush()
Rectangle(pos=(int(round(x)), int(round(y))),
size=(int(round(width)), int(round(height))))
StencilUse()
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = Ellipse(size=(ripple_rad, ripple_rad),
pos=(ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.))
if _has_scissor_instr:
ScissorPop()
else:
StencilUnUse()
Rectangle(pos=(int(round(x)), int(round(y))),
size=(int(round(width)), int(round(height))))
StencilPop()
self.bind(ripple_color=self.set_color, ripple_pos=self.set_ellipse,
ripple_rad=self.set_ellipse)
return super(TouchRippleBehavior, self).on_touch_down(touch)
def set_ellipse(self, instance, value):
ellipse = self.ellipse
ripple_pos = self.ripple_pos
ripple_rad = self.ripple_rad
ellipse.size = (ripple_rad, ripple_rad)
ellipse.pos = (ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.)
def set_color(self, instance, value):
self.col_instruction.rgba = value
def on_touch_up(self, touch):
if self in touch.ud:
rc = self.ripple_color
anim = Animation(ripple_color=[rc[0], rc[1], rc[2], 0.],
t=self.ripple_func_out, duration=self.ripple_duration_out)
anim.bind(on_complete=self.anim_complete)
anim.start(self)
return super(TouchRippleBehavior, self).on_touch_up(touch)
def anim_complete(self, anim, instance):
self.ripple_rad = 10
self.canvas.after.clear()
class SliderTouchRippleBehavior(object):
ripple_rad = NumericProperty(10)
ripple_pos = ListProperty([0, 0])
ripple_color = ListProperty((1., 1., 1., 1.))
ripple_duration_in = NumericProperty(.2)
ripple_duration_out = NumericProperty(.5)
fade_to_alpha = NumericProperty(.75)
ripple_scale = NumericProperty(2.0)
ripple_func_in = StringProperty('in_cubic')
ripple_func_out = StringProperty('out_quad')
def __init__(self, **kwargs):
super(SliderTouchRippleBehavior, self).__init__(**kwargs)
self.slider_stencil = None
self.slider_stencil_unuse = None
self.slider_line_stencil = None
self.slider_line_stencil_unuse = None
def on_touch_down(self, touch):
if self in touch.ud:
self.anim_complete(self, self)
self.ripple_pos = ripple_pos = (touch.x, touch.y)
Animation.cancel_all(self, 'ripple_rad', 'ripple_color')
rc = self.ripple_color
ripple_rad = self.ripple_rad
self.ripple_color = [rc[0], rc[1], rc[2], 1.]
anim = Animation(
ripple_rad=max(self.width, self.height) * self.ripple_scale,
t=self.ripple_func_in,
ripple_color=[rc[0], rc[1], rc[2], self.fade_to_alpha],
duration=self.ripple_duration_in)
anim.start(self)
with self.canvas.after:
x,y = self.to_window(*self.pos)
width, height = self.size
if self.orientation == 'horizontal':
ellipse_pos = (self.value_pos[0] - sp(16), self.center_y - sp(17))
stencil_pos = (self.x + self.padding + sp(2), self.center_y - sp(7))
stencil_size = (self.width - self.padding * 2 - sp(4), sp(14))
else:
ellipse_pos = (self.center_x - sp(17), self.value_pos[1] - sp(16))
stencil_pos = (self.center_x - sp(7), self.y + self.padding + sp(2))
stencil_size = (sp(14), self.height - self.padding * 2 - sp(4))
StencilPush()
Rectangle(
pos=stencil_pos,
size=stencil_size)
self.slider_stencil = Ellipse(
pos=ellipse_pos,
size=(sp(32), sp(32)))
StencilUse(op='lequal')
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = Ellipse(size=(ripple_rad, ripple_rad),
pos=(ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.))
StencilUnUse()
Rectangle(
pos=stencil_pos,
size=stencil_size)
self.slider_stencil_unuse = Ellipse(
pos=ellipse_pos,
size=(sp(32), sp(32)))
StencilPop()
self.bind(ripple_color=self.set_color, ripple_pos=self.set_ellipse,
ripple_rad=self.set_ellipse)
return super(SliderTouchRippleBehavior, self).on_touch_down(touch)
def update_stencil(self):
if self.orientation == 'horizontal':
pos = [self.value_pos[0] - sp(16),
self.center_y - sp(17)]
ellipse = [self.value_pos[0] - sp(16),
self.center_y - sp(17), sp(32), sp(32)]
else:
pos = [self.center_x - sp(17),
self.value_pos[1] - sp(16)]
ellipse = [self.center_x - sp(17),
self.value_pos[1] - sp(16), sp(32), sp(32)]
if self.slider_stencil is not None:
self.slider_stencil.pos = pos
if self.slider_stencil_unuse is not None:
self.slider_stencil_unuse.pos = pos
if self.slider_line_stencil is not None:
self.slider_line_stencil.ellipse = ellipse
if self.slider_line_stencil_unuse is not None:
self.slider_line_stencil_unuse.ellipse = ellipse
def on_value_pos(self, instance, value):
self.update_stencil()
def set_ellipse(self, instance, value):
ellipse = self.ellipse
ripple_pos = self.ripple_pos
ripple_rad = self.ripple_rad
ellipse.size = (ripple_rad, ripple_rad)
ellipse.pos = (ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.)
def set_color(self, instance, value):
self.col_instruction.rgba = value
def on_touch_up(self, touch):
if self in touch.ud:
rc = self.ripple_color
anim = Animation(ripple_color=[rc[0], rc[1], rc[2], 0.],
t=self.ripple_func_out, duration=self.ripple_duration_out)
anim.bind(on_complete=self.anim_complete)
anim.start(self)
return super(SliderTouchRippleBehavior, self).on_touch_up(touch)
def anim_complete(self, anim, instance):
self.ripple_rad = 10
self.canvas.after.clear()
self.slider_stencil = None
self.slider_stencil_unuse = None
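if __name__ == '__main__':
    # Minimal sketch of how these mixins are meant to be combined (assumes a
    # working Kivy install with a display). The behavior classes must come
    # before the widget class in the MRO, as here.
    from kivy.base import runTouchApp
    from kivy.uix.label import Label

    class RippleButton(GrabBehavior, TouchRippleBehavior, ButtonBehavior,
                       Label):
        pass

    def _on_release(instance):
        print('released')

    demo = RippleButton(text='press me')
    demo.bind(on_release=_on_release)
    runTouchApp(demo)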
|
mit
| 4,843,255,177,433,859,000 | 36.187135 | 88 | 0.548514 | false | 3.766436 | false | false | false |
groupe-conseil-nutshimit-nippour/django-geoprisma
|
geoprisma/acl/models.py
|
1
|
1433
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.models import User, Group
from geoprisma.models import Resource, Session
#Dummy decorator if schema is not supported
def schematize(original_class):
return original_class
# Import models that support PostgreSQL schemas if defined
if hasattr(settings, 'SCHEMATIZED_MODELS'):
try:
models = __import__(settings.SCHEMATIZED_MODELS, fromlist=['*'])
schematize = models.schematize
except ImportError:
from django.db import models
else:
from django.db import models
@schematize
class Action(models.Model):
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
verbose_name = "Action"
verbose_name_plural = "Actions"
def __unicode__(self):
return self.name
@schematize
class Right(models.Model):
id_group = models.ForeignKey(Group)
id_resource = models.ForeignKey(Resource)
actions = models.ManyToManyField(Action)
class Meta:
ordering = ('id_group', 'id_resource',)
unique_together = ('id_group', 'id_resource',)
verbose_name = "Right"
verbose_name_plural = "Rights"
def __unicode__(self):
return "%s - %s" % (self.id_group, self.id_resource,)
|
bsd-3-clause
| -81,777,879,238,157,520 | 27.854167 | 80 | 0.593859 | false | 4.190058 | false | false | false |
product-definition-center/pdc-client
|
pdc_client/plugins/image.py
|
1
|
4868
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import print_function
import sys
from datetime import datetime
from pdc_client.plugin_helpers import PDCClientPlugin, add_parser_arguments, extract_arguments
info_desc = """Generally there may be duplicate file names. If the file name
you provide matches more than one image, you will get a list of all those
images together with their SHA256 checksums. You disambiguate by providing the
checksum as a command line argument.
"""
def size_format(num):
fmt = '{0:.1f} {1}B'
factor = 1024.0
for unit in ('', 'Ki', 'Mi', 'Gi'):
if num < factor:
return fmt.format(num, unit)
num /= factor
return fmt.format(num, 'Ti')
class ImagePlugin(PDCClientPlugin):
command = 'image'
def register(self):
self.set_command()
list_parser = self.add_action('list', help='list all images')
list_parser.add_argument('--show-sha256', action='store_true',
help='whether to display SHA256 checksums along with the file names')
add_parser_arguments(list_parser, {'arch': {},
'compose': {},
'file_name': {},
'image_format': {},
'image_type': {},
'implant_md5': {},
'md5': {},
'sha1': {},
'sha256': {},
'volume_id': {},
'subvariant': {}},
group='Filtering')
list_parser.set_defaults(func=self.image_list)
info_parser = self.add_action('info', help='display details of an image', description=info_desc)
info_parser.add_argument('filename', metavar='FILENAME')
info_parser.add_argument('--sha256', nargs='?')
info_parser.set_defaults(func=self.image_info)
def _print_image_list(self, images, with_sha=False):
fmt = '{file_name}'
if with_sha:
fmt = '{file_name:80}{sha256}'
start_line = True
for image in images:
if start_line:
start_line = False
print(fmt.format(file_name='File-Name', sha256='SHA256'))
print()
print(fmt.format(**image))
def image_list(self, args):
filters = extract_arguments(args)
images = self.client.get_paged(self.client.images._, **filters)
if args.json:
print(self.to_json(list(images)))
return
self._print_image_list(images, args.show_sha256)
def image_info(self, args):
filters = {'file_name': args.filename}
if args.sha256:
filters['sha256'] = args.sha256
image = self.client.images._(**filters)
if image['count'] == 0:
print('Not found')
sys.exit(1)
elif image['count'] > 1:
print('More than one image with that name, use --sha256 to specify.')
self._print_image_list(image['results'], True)
sys.exit(1)
else:
image = image['results'][0]
if args.json:
print(self.to_json(image))
return
mtime = datetime.utcfromtimestamp(image['mtime'])
fmt = '{0:15} {1}'
print(fmt.format('File Name', image['file_name']))
print(fmt.format('Image Type', image['image_type']))
print(fmt.format('Image Format', image['image_format']))
print(fmt.format('Arch', image['arch']))
print(fmt.format('Disc', '{0} / {1}'.format(image['disc_number'], image['disc_count'])))
print(fmt.format('Modified', '{0} ({1})'.format(image['mtime'], mtime)))
print(fmt.format('Size', '{0} ({1})'.format(image['size'], size_format(image['size']))))
print(fmt.format('Bootable', 'yes' if image['bootable'] else 'no'))
print(fmt.format('Volume ID', image['volume_id']))
print(fmt.format('Implant MD5', image['implant_md5']))
print(fmt.format('Subvariant', image['subvariant']))
print('\nChecksums:')
print(' {0:7} {1}'.format('MD5', image['md5']))
print(' {0:7} {1}'.format('SHA1', image['sha1']))
print(' {0:7} {1}'.format('SHA256', image['sha256']))
if image['composes']:
print('\nUsed in composes:')
for compose in image['composes']:
print(' * {0}'.format(compose))
PLUGIN_CLASSES = [ImagePlugin]
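if __name__ == '__main__':
    # Quick sanity checks for size_format(); it is a pure function, so this
    # is safe to run standalone (assuming pdc_client is importable).
    assert size_format(512) == '512.0 B'
    assert size_format(1536) == '1.5 KiB'
    assert size_format(3 * 1024 ** 4) == '3.0 TiB'
    print('size_format checks passed')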
|
mit
| 1,686,656,099,837,159,700 | 37.03125 | 104 | 0.513558 | false | 4.111486 | false | false | false |
OAGr/exercism
|
python/twelve-days/twelve_days.py
|
1
|
1335
|
#1 hour 15 minutes with minor distractions
def sing():
return verses(1,12)
def verse(i):
lines = []
lines.append("On the %s day of Christmas my true love gave to me" % days[i])
lines += chorus(i)
lines.append(ending(i))
lines = ', '.join(lines)
return lines + "\n"
def verses(start,stop):
return "\n".join([verse(i) for i in range(start,stop+1)]) + "\n"
def chorus(i):
present = i
chorus = []
while (present > 1):
chorus.append(lines[present])
present = present - 1
return chorus
def ending(i):
if i == 1:
return lines[1]
else:
return "and " + lines[1]
lines = [
'NAN',
'a Partridge in a Pear Tree.',
'two Turtle Doves',
'three French Hens',
'four Calling Birds',
'five Gold Rings',
'six Geese-a-Laying',
'seven Swans-a-Swimming',
'eight Maids-a-Milking',
'nine Ladies Dancing',
'ten Lords-a-Leaping',
'eleven Pipers Piping',
'twelve Drummers Drumming'
]
days = [
'NAN',
'first',
'second',
'third',
'fourth',
'fifth',
'sixth',
'seventh',
'eighth',
'ninth',
'tenth',
'eleventh',
'twelfth'
]
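if __name__ == '__main__':
    # Quick check: the first verse mentions only the partridge, and the full
    # song is simply verses 1 through 12.
    print(verse(1))
    assert sing() == verses(1, 12)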
|
mit
| 8,918,104,149,386,228,000 | 20.532258 | 80 | 0.494382 | false | 3.201439 | false | false | false |
transientlunatic/minke
|
minke/mdctools.py
|
1
|
34706
|
"""
88b d88 88 88
888b d888 "" 88
88`8b d8'88 88
88 `8b d8' 88 88 8b,dPPYba, 88 ,d8 ,adPPYba,
88 `8b d8' 88 88 88P' `"8a 88 ,a8" a8P_____88
88 `8b d8' 88 88 88 88 8888[ 8PP"""""""
88 `888' 88 88 88 88 88`"Yba, "8b, ,aa
88 `8' 88 88 88 88 88 `Y8a `"Ybbd8"'
--------------------------------------------------------
This file is a part of Minke, a tool for generating simulated
gravitational wave signals, used for characterising and training
search algorithms.
Minke was created by Daniel Williams, based on work started by Chris
Pankow and others, and is built around the LALSimulation library.
"""
from glue.ligolw import ligolw, utils, lsctables
lsctables.use_in(ligolw.LIGOLWContentHandler);
import numpy
import lalburst, lalsimulation, lalmetaio
from minke.antenna import response
from lal import TimeDelayFromEarthCenter as XLALTimeDelayFromEarthCenter
#from pylal.xlal.datatypes.ligotimegps import LIGOTimeGPS
from lal import LIGOTimeGPS
from glue.ligolw.utils import process
import glue
import glue.ligolw
import gzip
import lal, lalframe
import numpy as np
import pandas as pd
import os
import os.path
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import re
import random
import minke
from minke import sources
sourcemap = {}
for classin in dir(sources):
classin = sources.__dict__[classin]
if hasattr(classin, "waveform"):
sourcemap[classin.waveform] = classin
def source_from_row(row):
waveform = row.waveform
sourceobj = sourcemap[row.waveform].__new__(sourcemap[row.waveform])
sourceobj.numrel_data = str("")
params = {}
for attr in dir(row):
if not attr[0] == "_" and not attr[:3] =="get":
#print attr
try:
params[attr] = getattr(row, attr)
setattr(sourceobj, attr, getattr(row, attr))
except AttributeError:
print("Error processing the {} column".format(attr))
sourceobj.params = params
try:
sourceobj.time = row.time_geocent_gps
except:
sourceobj.time = row.geocent_start_time
pass
return sourceobj
def source_from_dict(params):
    sourceobj = sourcemap[params['morphology']].__new__(sourcemap[params['morphology']])
    sourceobj.numrel_data = str("")
    # The parameters arrive directly as a dict here, so copy them onto the
    # source object rather than reading them from an undefined table row.
    for attr, value in params.items():
        setattr(sourceobj, attr, value)
    sourceobj.params = params
    try:
        sourceobj.time = params['time_geocent_gps']
    except KeyError:
        sourceobj.time = params.get('geocent_start_time')
    return sourceobj
table_types = {
# Ad-Hoc
"ga" : lsctables.SimBurstTable,
"sg" : lsctables.SimBurstTable,
"wnb" : lsctables.SimBurstTable,
"sc" : lsctables.SimBurstTable,
# Supernova Families
"d08" : lsctables.SimBurstTable,
"s10" : lsctables.SimBurstTable,
"m12" : lsctables.SimBurstTable,
"o13" : lsctables.SimBurstTable,
"y10" : lsctables.SimBurstTable,
# Long Duration
"adi" : lsctables.SimBurstTable,
# Ringdown
"rng" : lsctables.SimRingdownTable,
"gng" : lsctables.SimRingdownTable,
}
tables = {
"burst" : lsctables.SimBurstTable,
"ringdown" : lsctables.SimRingdownTable
}
def mkdir(path):
"""
Make all of the tree of directories in a given path if they don't
already exist.
Parameters
----------
path : str
The path to the desired directory.
"""
sub_path = os.path.dirname(path)
if not os.path.exists(sub_path):
mkdir(sub_path)
if not os.path.exists(path):
os.mkdir(path)
class TableTypeError(Exception):
pass
class MDCSet():
inj_families_names = {'ga' : 'Gaussian',
'sg' : 'SineGaussian',
'wnb': 'BTLWNB',
"sc" : "StringCusp",
# Supernova families
'd08' : 'Dimmelmeier+08',
's10' : 'Scheidegger+10',
'm12' : 'Mueller+12',
'o13' : 'Ott+13',
'y10' : "Yakunin+10",
# Long-duration
'adi' : 'ADI',
# Ringdown
'rng' : "BBHRingdown",
'gng' : "GenericRingdown",
}
inj_families_abb = dict((v,k) for k,v in list(inj_families_names.items()))
hist_parameters = {
"StringCusp": ["amplitude", "ra", "dec"],
"SineGaussian": ["hrss", "psi", "ra", "dec"],
"Gaussian": ["hrss", "psi", "ra", "dec"],
"BTLWNB": ["hrss", "ra", "dec"],
"Dimmelmeier+08": ['hrss', 'ra', 'dec']
}
waveforms = []
def __init__(self, detectors, name='MDC Set', table_type = "burst"):
"""
Represents an MDC set, stored in an XML SimBurstTable file.
Parameters
----------
detectors : list
A list of detector names where the injections should be made.
name : str
A name for the MDC Set. Defaults to 'MDC Set'.
table_type : str
The type of table which should be generated. Default is `burst`,
which generates a SimBurstTable.
"""
self.detectors = detectors
self.waveforms = []
self.strains = []
self.egw = []
self.times = []
self.name = name
self.times = np.array(self.times)
self.table_type = tables[table_type]
def __add__(self, waveform):
"""
Handle a waveform being added to the MDC set.
Parameters
----------
waveform : Waveform object
The waveform which should be added to the MDC set.
"""
# Check that this type of waveform can go into this type of
# XML file.
if not table_types[self.inj_families_abb[waveform.waveform]] == self.table_type:
raise TableTypeError()
self.waveforms.append(waveform)
self.times = np.append(self.times, waveform.time)
def save_xml(self, filename):
"""
Save the MDC set as an XML SimBurstTable.
Parameters
----------
filename : str
The location to save the xml file. The output is gzipped, so ending it with
a ".gz" would stick with convention.
"""
xmldoc = ligolw.Document()
lw = xmldoc.appendChild(ligolw.LIGO_LW())
sim = lsctables.New(self.table_type)
lw.appendChild(sim)
# This needs to be given the proper metadata once the package has the maturity to
# write something sensible.
for waveform in self.waveforms:
procrow = process.register_to_xmldoc(xmldoc, "minke_burst_mdc+{}".format(minke.__version__), {}) # waveform.params)
try:
waveform_row = waveform._row(sim)
waveform_row.process_id = procrow.process_id
except:
row = sim.RowType()
for a in list(self.table_type.validcolumns.keys()):
if a in list(waveform.params.keys()):
setattr(row, a, waveform.params[a])
else:
if not hasattr(waveform, a):
setattr(row, a, 0)
else:
setattr(row, a, getattr(waveform, a))
row.waveform = waveform.waveform
if self.table_type == lsctables.SimBurstTable:
# Fill in the time
                    row.set_time_geocent(LIGOTimeGPS(float(waveform.time)))
# Get the sky locations
row.ra, row.dec, row.psi = waveform.ra, waveform.dec, waveform.psi
row.simulation_id = waveform.simulation_id
row.waveform_number = random.randint(0,int(2**32)-1)
### !! This needs to be updated.
row.process_id = "process:process_id:0" #procrow.process_id
waveform_row = row
sim.append(waveform_row)
#del waveform_row
# Write out the xml and gzip it.
utils.write_filename(xmldoc, filename, gz=True)
def load_xml(self, filename, full=True, start=None, stop=None):
"""Load the MDC Set from an XML file containing the SimBurstTable.
Parameters
----------
filename : str
The filename of the XML file.
full : bool
If this is true (which is the default) then all of
the calculated parameters are computed from the waveform
            definition.
        start : float
            The time at which the xml read-in should start. The default is
            None, in which case the xml file will be read in from the start.
        stop : float
            The last time to be read from the xml file. The default is None,
            which causes the xml to be read right up to the last time in the
            file.
To Do
-----
        At the moment this loads the information into the object, but it
doesn't produce waveform objects for each of the injections in the
file. This should be fixed so that the object works symmetrically.
"""
i = 0
#sim_burst_table = lalburst.SimBurstTableFromLIGOLw(filename, start, stop)
xml = glue.ligolw.utils.load_filename(filename,
contenthandler = glue.ligolw.ligolw.LIGOLWContentHandler,
verbose = True)
sim_burst_table = glue.ligolw.table.get_table(xml, self.table_type.tableName)
for i,simrow in enumerate(sim_burst_table):
            # This is an ugly kludge to get around the poor choice of waveform
            # name in the xmls: map the s15 numerical-relativity rows onto the
            # Dimmelmeier+08 family.
            if simrow.waveform[:3]=="s15":
                self.numrel_file = str(simrow.waveform)
                simrow.waveform = "Dimmelmeier+08"
self.waveforms.append(source_from_row(simrow))
if full:
self._measure_hrss(i)
self._measure_egw_rsq(i)
if self.table_type == tables["burst"]:
self.times = np.append(self.times, float(simrow.time_geocent_gps))
def _generate_burst(self,row,rate=16384.0):
"""
Generate the burst described in a given row, so that it can be
measured.
Parameters
----------
row : SimBurst Row
The row of the waveform to be measured
rate : float
The sampling rate of the signal, in Hz. Defaults to 16384.0Hz
Returns
-------
hp :
The strain in the + polarisation
hx :
The strain in the x polarisation
hp0 :
A copy of the strain in the + polarisation
hx0 :
A copy of the strain in the x polarisation
"""
row = self.waveforms[row]
hp, hx, hp0, hx0 = row._generate()
return hp, hx, hp0, hx0
def _getDetector(self, det):
"""
A method to return a LALDetector object corresponding to a detector's
X#-style name, e.g. 'H1' as the Hanford 4km detector.
Parameters
----------
det : str
A string describing the detector in the format letter-number, e.g
"H1" would be the Hanford 4km detector, "L1" would be the
Livingston 4km, and so-forth.
Returns
-------
detector : LALDetector
The LAL object describing the detector
"""
# get detector
return lalsimulation.DetectorPrefixToLALDetector(det)
#if det not in lal.cached_detector_by_prefix.keys():
# raise ValueError, "%s is not a cached detector. "\
# "Cached detectors are: %s" % (det, inject.cached_detector.keys())
#return lal.cached_detector_by_prefix[det]
def _timeDelayFromGeocenter(self, detector, ra, dec, gpstime):
"""
Calculate the time delay between the geocentre and a given detector
for a signal from some sky location.
Parameters
----------
detector : str
A string describing the detector, e.g. H1 is the Hanford 4km
detector.
ra : float
The right-ascension of the observation in radians
dec : float
            The declination of the observation in radians
        gpstime : float
            The GPS time of the signal at the geocentre
"""
if isinstance(detector, str): detector = self._getDetector(detector)
gpstime = LIGOTimeGPS(float(gpstime))
return XLALTimeDelayFromEarthCenter(detector.location, ra, dec, gpstime)
def directory_path(self):
"""
Generate the directory where the frames from this MDC should be stored,
so, e.g. Gaussians 0d100 would go in "ga/ga0d100/"
Returns
-------
str
the folder structure
"""
name = self._simID(0)
abb = self.inj_families_abb[self.waveforms[0].waveform].lower()
return "{}/{}".format(abb, name)
def _simID(self, row):
"""
Generate a name for an injection set in the format expected by cWB
Parameters
----------
row : SimBurst
The simburst table row describing the injection
Returns
-------
str
The name of the injection in the cWB format
"""
row = self.waveforms[row]
name = ''
numberspart = ''
if row.waveform in ("Dimmelmeier+08", "Scheidegger+10", "Mueller+12", "Ott+13", "Yakunin+10"):
#print row
numberspart = os.path.basename(row.params['numrel_data']).split('.')[0]
if row.waveform == "Gaussian":
numberspart = "{:.3f}".format(row.duration * 1e3)
elif row.waveform == "SineGaussian":
if row.pol_ellipse_e==1.0:
pol="linear"
elif row.pol_ellipse_e==0.0:
pol="circular"
elif 0.0<row.pol_ellipse_e<1.0:
pol = "elliptical"
else:
pol = "inclined"
numberspart = "f{:.0f}_q{:.0f}_{}".format(row.frequency, row.q, pol)
elif row.waveform == "BTLWNB":
numberspart = "{}b{}tau{}".format(row.frequency, row.bandwidth, row.duration)
name += '{}_{}'.format(self.inj_families_abb[row.waveform].lower(), numberspart).replace('.','d')
return name
def _measure_hrss(self, row, rate=16384.0):
"""
Measure the various components of hrss (h+^2, hx^2, hphx) for a given
input row. This is accomplished by generating the burst and calling
the SWIG wrapped XLALMeasureHrss in lalsimulation.
Parameters
----------
row : int
The row number of the waveforms to be measured
rate : float
The sampling rate of the signal, in Hz. Defaults to 16384.0Hz
Returns
-------
hrss : float
The measured hrss of the waveform amplitude: sqrt(|Hp|^2 + |Hx|^2)
hphp : float
The hrss of the + polarisation only.
hxhx : float
The hrss of the x polarisation only.
hphx : float
The hrss of |HpHx|
"""
row = self.waveforms[row]
hp, hx, hp0, hx0 = row._generate() #self._generate_burst(row)# self.hp, self.hx, self.hp0, self.hx0
hp0.data.data *= 0
hx0.data.data *= 0
# H+ hrss only
hphp = lalsimulation.MeasureHrss(hp, hx0)**2
# Hx hrss only
hxhx = lalsimulation.MeasureHrss(hp0, hx)**2
# sqrt(|Hp|^2 + |Hx|^2)
hrss = lalsimulation.MeasureHrss(hp, hx)
hp.data.data = numpy.abs(hx.data.data) + numpy.abs(hp.data.data)
# |H+Hx|
hphx = (lalsimulation.MeasureHrss(hp, hx0)**2 - hrss**2)/2
#print hrss
self.strains.append([hrss, hphp, hxhx, hphx])
def _measure_egw_rsq(self, row, rate=16384.0):
"""
Measure the energy emitted in gravitational waves divided
by the distance squared in M_solar / pc^2. This is accomplished
by generating the burst and calling the SWIG wrapped
XLALMeasureHrss in lalsimulation.
Parameters
----------
row : int
The row number of the waveforms to be measured
rate : float
The sampling rate of the signal, in Hz. Defaults to 16384.0Hz
Returns
-------
egw : float
The energy emitted in gravitational waves divided
by the distance squared in M_solar / pc^2.
"""
hp, hx, _, _ = self._generate_burst(row)
self.egw.append(lalsimulation.MeasureEoverRsquared(hp, hx))
def _responses(self, row):
"""
Calculate the antenna repsonses for each detector to the waveform.
Parameters
----------
row : int
The row number of the waveforms to be measured
Returns
-------
responses : list of lists
A list containing the lists of antenna responses, with the first
element of each list containing the detector acronym.
"""
output = []
row = self.waveforms[row]
for detector in self.detectors:
time = row.time_geocent_gps + self._timeDelayFromGeocenter(detector, row.ra, row.dec, row.time_geocent_gps)
time = np.float64(time)
rs = response(time, row.ra, row.dec, 0, row.psi, 'radians', detector)
output.append([detector, time, rs[0], rs[1]] )
return output
def plot_skymap(self):
"""
        Plot a skymap of the injection distribution in RA and DEC on a Hammer projection.
Returns
-------
matplotlib figure
"""
fig = plt.figure()
# Load the ra and dec numbers out of the waveforms
dec = [getattr(s, 'dec') for s in self.waveforms]
ra = [getattr(s, 'ra') for s in self.waveforms]
# Make the plot on a hammer projection
plt.subplot(111, projection='hammer')
H, x, y = np.histogram2d(ra, dec, [50, 25], range=[[0, 2*np.pi], [-np.pi/2, np.pi/2]])
dist = plt.pcolormesh(x-np.pi,y, H.T, cmap="viridis")
plt.title("Sky distribution")
plt.colorbar(dist, orientation='horizontal')
return fig
def plot_hist(self, parameter):
"""
Plot a histogram of a waveform parameter.
Parameters
----------
parameter : str
The name of the simburst table parameter which is desired for the plot.
Returns
-------
matplotlib figure
"""
fig = plt.figure()
prms = [getattr(s, parameter) for s in self.waveforms]
ax2 = plt.subplot(111)
ax2.set_title("{} distribution".format(parameter))
ax2.set_xlabel(parameter)
ax2.hist(prms, bins=100, log=True, histtype="stepfilled", alpha=0.6);
return fig
def gravEn_row(self, row, frame):
"""
Produces a gravEn-style log row for a row of the simBurstTable.
Parameters
----------
row : int
The row number of the waveforms to be measured
Returns
-------
str
A string in the gravEn format which describes the injection.
"""
strains = self.strains[row]
rowname = self._simID(row)
responses = self._responses(row)
energy = self.egw[row]
row = self.waveforms[row]
output = []
if not row.incl:
cosincl = ""
else:
cosincl = np.cos(row.incl)
output.append(self.name) # GravEn_SimID
output.append(strains[0]) # SimHrss
output.append(energy) # SimEgwR2
output.append(strains[0]) # GravEn_Ampl
output.append(cosincl) # Internal_x the cosine of the angle the LOS makes with axis of angular momentum
output.append(row.phi) # Intenal_phi angle between source x-axis and the LOS
output.append(np.cos(np.pi/2.0 - row.dec)) # cos(External_x) # this needs to be the co-declination
output.append(row.ra if row.ra < np.pi else row.ra - 2*np.pi)
# ^ External_phi # This is the RA projected onto an Earth-based coordinate system
output.append(row.psi) # External_psi # source's polarisation angle
output.append(frame.start) # FrameGPS
output.append(row.time_geocent_gps) # EarthCtrGPS
output.append(rowname) # SimName
output.append(strains[1]) # SimHpHp
output.append(strains[2]) # SimHcHc
output.append(strains[3]) # SimHpHp
output.append(" ".join(" ".join(map(str,l)) for l in responses))
return ' '.join(str(e) for e in output)
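# Example (sketch) of the intended workflow for this class; the waveform
# constructor arguments are omitted here, since they depend on the family
# (see minke.sources):
#
#     mdcset = MDCSet(['H1', 'L1'], name='example set')
#     mdcset + waveform            # any minke.sources waveform object
#     mdcset.save_xml('example_mdc.xml.gz')
#     fig = mdcset.plot_skymap()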
class Frame():
"""
Represents a frame, in order to prepare the injection frames
"""
    def __init__(self, start, duration, ifo, number = -1):
        """
        Parameters
        ----------
        start : float
            The GPS start time of the frame.
        duration : float
            The duration of the frame, in seconds.
        ifo : list
            The interferometers which this frame covers.
        number : int
            The frame's number within the project. Defaults to -1.
        """
        self.start = start
        self.duration = duration
        self.end = self.start + duration
        self.ifos = ifo
        self.number = number
def __repr__(self):
out = ''
out += "MDC Frame \n"
for ifo in self.ifos:
out += "{} {} {} \n".format(ifo, self.start, self.duration)
return out
def get_rowlist(self,mdcs):
"""
Return the rows from an MDC set which correspond to this frame.
Parameters
----------
mdcs : MDCSet object
The set of MDCs from which the rows are to be found.
"""
return np.where((mdcs.times<self.end)&(mdcs.times>self.start))[0]
def calculate_n_injections(self, mdcs):
return len(mdcs.times[(mdcs.times<self.end)&(mdcs.times>self.start)])
def generate_log(self,mdc):
log = '# GravEn_SimID SimHrss SimEgwR2 GravEn_Ampl Internal_x Internal_phi External_x External_phi External_psi FrameGPS EarthCtrGPS SimName SimHpHp SimHcHc SimHpHc H1 H1ctrGPS H1fPlus H1fCross L1 L1ctrGPS L1fPlus L1fCross\n'
rowlist = self.get_rowlist(mdc)
for row in rowlist:
log += mdc.gravEn_row(row, self)
log += "\n"
return log
def generate_gwf(self, mdc, directory, project = "Minke", channel="SCIENCE", force=False, rate=16384.0):
"""
Produce the gwf file which corresponds to the MDC set over the period of this frame.
Parameters
----------
mdc : MDCSet object
The MDC set which should be used to produce this frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
project : str
The name of the project which this frame is a part of. Defaults to 'Minke'.
channel : str
The name of the channel which the injections should be made into. This is prepended by the initials
for each interferometer, so there will be a channel for each interferometer in the gwf.
force : bool
If true this forces the recreation of a GWF file even if it already exists.
Outputs
-------
gwf
The GWF file for this frame.
"""
ifosstr = "".join(set(ifo[0] for ifo in self.ifos))
family = mdc.waveforms[0].waveform
epoch = lal.LIGOTimeGPS(self.start)
filename = "{}-{}-{}-{}.gwf".format(ifosstr, family, self.start, self.duration)
self.frame = lalframe.FrameNew(epoch = epoch,
duration = self.duration, project='', run=1, frnum=1,
detectorFlags=lal.LALDETECTORTYPE_ABSENT)
ifobits = np.array([getattr(lal,"{}_DETECTOR_BIT".format(lal.cached_detector_by_prefix[ifo].frDetector.name.upper()))
for ifo in self.ifos])
ifoflag = numpy.bitwise_or.reduce(ifobits)
RUN_NUM = -1 # Simulated data should have a negative run number
head_date = str(self.start)[:5]
frameloc = directory+"/"+mdc.directory_path()+"/"+head_date+"/"
mkdir(frameloc)
if not os.path.isfile(frameloc + filename) or force:
epoch = lal.LIGOTimeGPS(self.start)
frame = lalframe.FrameNew(epoch, self.duration, project, RUN_NUM, self.number, ifoflag)
data = []
# Loop through each interferometer
for ifo in self.ifos:
# Calculate the number of samples in the timeseries
nsamp = int((self.end-self.start)*rate)
# Make the timeseries
h_resp = lal.CreateREAL8TimeSeries("{}:{}".format(ifo, channel), epoch, 0, 1.0/rate, lal.StrainUnit, nsamp)
# Loop over all of the injections corresponding to this frame
rowlist = self.get_rowlist(mdc)
if len(rowlist)==0: return
for row in rowlist:
sim_burst = mdc.waveforms[row]._row()
if sim_burst.hrss > 1:
distance = sim_burst.amplitude
else:
distance = None
#hp, hx = lalburst.GenerateSimBurst(sim_burst, 1.0/rate);
hp, hx, _, _ = mdc.waveforms[row]._generate(rate=rate, half=True, distance=distance)
# Apply detector response
det = lalsimulation.DetectorPrefixToLALDetector(ifo)
# Produce the total strains
h_tot = lalsimulation.SimDetectorStrainREAL8TimeSeries(hp, hx,
sim_burst.ra, sim_burst.dec, sim_burst.psi, det)
# Inject the waveform into the overall timeseries
lalsimulation.SimAddInjectionREAL8TimeSeries(h_resp, h_tot, None)
lalframe.FrameAddREAL8TimeSeriesSimData(frame, h_resp)
# Make the directory in which to store the files
# if it doesn't exist already
mkdir(frameloc)
# Write out the frame file
lalframe.FrameWrite(frame, frameloc+filename)
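# Example (sketch; the GPS time below is illustrative, and LAL must be
# installed for the module-level imports):
#
#     frame = Frame(1187008882, 4096, ['H1', 'L1'])
#     rows = frame.get_rowlist(mdcset)     # injections inside this frame
#     frame.generate_gwf(mdcset, '/data/mdc/frames', channel='SCIENCE')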
class HWInj(Frame):
"""
Represents a hardware injection frame.
Injection frames must be an ASCII file of the hoft sampled at
the antenna sampling rate, appropriately convolved with an
antenna response function.
As a result of the simplicity of this specific output format
we do not need information such as start-time in the file itself,
however we should have a sensible naming scheme for the ASCII files
since they will need to be produced as sidecars for an xml file.
"""
def __init__(self, ifos):
"""We'll need to know the start-time, the duration, and the ifo
for each which is to be used for hardware injections in order
to keep consistency with the data in the xml file, and so that the
appropriate waveform is injected into the appropriate detector.
Parameters
----------
ifos : list
The name of the interferometers, e.g. "L1" for the Livingston, LA LIGO detector.
"""
self.ifos = ifos
def __repr__(self):
"""
The printable representation of this object.
"""
out = ""
out += "Hardware MDC Frame \n"
for ifo in self.ifos:
out += "{} \n".format(ifo)
return out
def generate_pcal(self, mdc, directory, force = False, rate=16384):
"""
Produce the PCAL-ready hardware injection files as an ASCII list
sampled at the detector's sample rate.
Parameters
----------
mdc : MDCSet object
The signal set which should be used to generate the frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
force : bool
If true this forces the regeneration of the file, even if it
already exists.
Outputs
-------
ascii file
The ASCII file containing the correctly sampled waveform convolved with
the antenna pattern.
"""
family = mdc.waveforms[0].waveform
frameloc = os.path.join(directory, (mdc.directory_path()))
#rowlist = self.get_rowlist(mdc)
# Unlike with a conventional frame, we need to produce a separate file
# for each IFO.
for ifo in self.ifos:
for sim_burst in mdc.waveforms:
#sim_burst = mdc.waveforms[row]
# Check if the file exists, or if we're forcing the creation
filename = "{}_{}_{}.txt".format(family,
sim_burst.time,
ifo)
                filepath = os.path.join(frameloc, filename)
                if not os.path.isfile(filepath) or force:
                    epoch = lal.LIGOTimeGPS(sim_burst.time)
                    duration = 10
                    nsamp = duration * rate
                    h_tot = sim_burst._generate_for_detector([ifo], sample_rate=rate)
                    data = np.array(h_tot.data.data)
                    # Write the ASCII strain alongside the other MDC products.
                    np.savetxt(filepath, data)
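# A minimal usage sketch (hypothetical `mdc` set and directory; HWInj only
# needs the list of interferometers, as described in its docstring):
#
#     hw = HWInj(["H1", "L1"])
#     hw.generate_pcal(mdc, "/home/albert.einstein/data/mdc/frames/")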
class HWFrameSet():
def __init__(self, ifos=["H1", "L1"]):
"""
A collection of hardware injection frames.
        Parameters
        ----------
        ifos : list
            The interferometers for which hardware injection frames should
            be produced, e.g. ["H1", "L1"].
"""
self.frames = []
self.frames = [HWInj(ifos)]
#self.frames.append(frame)
def full_frameset(self, mdc, directory, force=False):
"""
        Produce the hardware-injection ASCII files which correspond to the MDC set over the period of the frames in this collection.
Parameters
----------
mdc : MDCSet object
The MDC set which should be used to produce this frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
force : bool
If true this forces the recreation of a GWF file even if it already exists.
Outputs
-------
ascii files
The ASCII files for these hardware injections.
"""
for frame in self.frames:
frame.generate_pcal(mdc, directory, force)
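# The collection version follows the same pattern (hypothetical `mdc` set and
# directory):
#
#     hw_frames = HWFrameSet(ifos=["H1", "L1"])
#     hw_frames.full_frameset(mdc, "/home/albert.einstein/data/mdc/frames/")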
class FrameSet():
def __init__(self, frame_list):
"""
A collection of frames.
Parameters
----------
frame_list : str
            The filepath of a CSV file containing the list of frames,
and the parameters required to produce them: the start and
duration times, and the interferometers they describe.
"""
self.frames = []
self.frame_list = frame_list = pd.read_csv(frame_list)
for frame in frame_list.iterrows():
frame = frame[1]
ifos = frame['ifo'].replace("['",'').replace("']",'').replace("'",'').split(' ')
frame = Frame(frame['start time'],frame['duration'],ifos)
self.frames.append(frame)
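    # The CSV is assumed to look roughly like the following (the column names
    # are taken from the lookups above; the ifo column is a quoted,
    # space-separated list, hence the replace() calls):
    #
    #     start time,duration,ifo
    #     1126259446,4096,"['H1' 'L1']"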
def full_frameset(self, mdc, directory, channel="SCIENCE", force=False):
"""
        Produce the gwf files which correspond to the MDC set over the period of the frames in this collection.
Parameters
----------
mdc : MDCSet object
The MDC set which should be used to produce this frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
        channel : str
            The name of the channel which the injections should be made into.
            The interferometer prefix is prepended to this name, so there will
            be a channel for each interferometer in the gwf.
force : bool
If true this forces the recreation of a GWF file even if it already exists.
Outputs
-------
gwf files
The GWF files for these frames.
"""
for frame in self.frames:
frame.generate_gwf(mdc, directory, channel, force)
def full_logfile(self, mdc, location):
"""
Produce a log file for the entire frame set
"""
full_log = ''
for frame in self.frames:
full_log += frame.generate_log(mdc)
with open(location, "w") as text_file:
text_file.write(full_log)
|
isc
| -1,914,375,457,587,391,000 | 35.687104 | 291 | 0.54953 | false | 3.95555 | false | false | false |
niutool/niuforum
|
forum/utils.py
|
1
|
3033
|
import re
from PIL import Image, ImageOps
from io import BytesIO
from django.contrib.auth.models import User
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseForbidden
from django.shortcuts import get_object_or_404
from django.core.files.uploadedfile import SimpleUploadedFile
from forum.models import Topic
MENTION_REGEX = re.compile(r'@(\w+)', re.M)
IMAGE_LARGE = 144
IMAGE_MEDIUM = 96
IMAGE_SMALL = 48
NUM_PER_PAGE = 20
def _thumbnail(upload, size, fmt):
img = ImageOps.fit(upload, size, Image.ANTIALIAS)
temp = BytesIO()
img.save(temp, fmt, quality=95)
temp.seek(0)
return temp
def create_thumbnail(src, new_name, ext):
upload = Image.open(BytesIO(src.read()))
fmt = src.content_type.split('/')[-1]
large = _thumbnail(upload, (IMAGE_LARGE, IMAGE_LARGE), fmt)
filename_l = "%s_l.%s" % (new_name, ext)
large_file = SimpleUploadedFile(filename_l, large.read(), content_type=src.content_type)
medium = _thumbnail(upload, (IMAGE_MEDIUM, IMAGE_MEDIUM), fmt)
filename_m = "%s_m.%s" % (new_name, ext)
medium_file = SimpleUploadedFile(filename_m, medium.read(), content_type=src.content_type)
small = _thumbnail(upload, (IMAGE_SMALL, IMAGE_SMALL), fmt)
filename_s = "%s_s.%s" % (new_name, ext)
small_file = SimpleUploadedFile(filename_s, small.read(), content_type=src.content_type)
return large_file, medium_file, small_file
def get_pagination(current_page, num_pages, count):
page_list = []
show_pages = 2*count+1
if show_pages >= num_pages:
page_list.extend(range(1, num_pages+1))
elif current_page - count < 1:
page_list.extend(range(1, show_pages+1))
elif current_page + count > num_pages:
page_list.extend(range(num_pages+1-show_pages, num_pages+1))
else:
page_list.extend(range(current_page-count, current_page+count+1))
return page_list
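# A few sanity checks of the windowing logic above (count=2 gives a window of
# up to five pages):
#
#     get_pagination(1, 20, 2)  -> [1, 2, 3, 4, 5]
#     get_pagination(10, 20, 2) -> [8, 9, 10, 11, 12]
#     get_pagination(19, 20, 2) -> [16, 17, 18, 19, 20]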
def topic_pagination(page, topics):
paginator = Paginator(topics, NUM_PER_PAGE)
try:
topic_list = paginator.page(page)
except PageNotAnInteger:
topic_list = paginator.page(1)
except EmptyPage:
topic_list = paginator.page(paginator.num_pages)
page_list = get_pagination(topic_list.number, paginator.num_pages, 2)
return topic_list, page_list
def author_required(view_func):
def _wrapped_view_func(request, *args, **kwargs):
topic_id = kwargs.get('topic_id')
topic = get_object_or_404(Topic, id=topic_id)
if topic.author == request.user:
return view_func(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return _wrapped_view_func
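# Typical usage on a topic-editing view (the view name is hypothetical; the
# decorator only requires a `topic_id` keyword argument):
#
#     @author_required
#     def edit_topic(request, topic_id):
#         ...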
def get_metioned_user(sender, markdown):
mentioned = set(re.findall(MENTION_REGEX, markdown)) - set([sender.username])
# mentioned = set(re.findall(MENTION_REGEX, markdown))
if mentioned:
return User.objects.filter(username__in=mentioned)
return None
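# For example, with `carol` being the sending User:
#
#     get_metioned_user(carol, "thanks @alice and @bob")
#     # -> User.objects.filter(username__in={"alice", "bob"})
#
# A self-mention by carol is dropped by the set difference above.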
|
mit
| 2,211,155,211,056,811,500 | 32.7 | 94 | 0.667326 | false | 3.430995 | false | false | false |
rodrigorm/golimar
|
plugin/python/golimar/ui/vimui.py
|
1
|
8614
|
#!/usr/bin/env python
#
# vimui.py
#
# User Interface for Vim
import functools
import vim
class Ui:
def __init__(self, skype):
self.skype = skype
self.is_open = False
self.tabnr = None
def open(self):
if self.is_open:
return
# try:
self.messages = MessagesWindow(self, 'tabnew')
self.messages.create()
self.tabnr = vim.eval('tabpagenr()')
self.friends = FriendsWindow(self, 'vertical belowright new')
self.friends.create()
vim.command('vertical resize 40')
self.chats = ChatsWindow(self, 'belowright new')
self.chats.create()
vim.command('resize +5')
self.messages.focus()
self.compose = ComposeWindow(self, 'rightbelow new')
self.compose.create()
vim.command('resize 5')
self.is_open = True
self.update()
# except Exception as e:
# self.is_open = False
# raise e
def composeMessage(self):
return self.compose.message()
def composeClean(self):
self.compose.clean()
def setChat(self, chat):
self.messages.setChat(chat)
if self.has_focus():
self.messages.markAsSeen()
self.compose.clean()
self.compose.focus()
self.update()
def render(self):
self.friends.update()
self.chats.update()
self.messages.update()
def bind(self):
self.skype.RegisterEventHandler('MessageStatus', self.MessageStatus)
self.skype.RegisterEventHandler('UserStatus', self.UserStatus)
self.skype.RegisterEventHandler('ConnectionStatus', self.UserStatus)
def unbind(self):
self.skype.UnregisterEventHandler('MessageStatus', self.MessageStatus)
self.skype.UnregisterEventHandler('UserStatus', self.UserStatus)
self.skype.UnregisterEventHandler('ConnectionStatus', self.UserStatus)
def MessageStatus(self, message, status):
self.update()
def UserStatus(self, status):
self.update()
def update(self):
self.unbind()
self.render()
if self.has_focus():
self.messages.markAsSeen()
self.bind()
def has_focus(self):
return self.is_open and vim.eval('tabpagenr()') == self.tabnr
def selectedFriend(self):
return self.friends.selected()
def selectedChat(self):
return self.chats.selected()
class Window:
name = 'WINDOW'
open_cmd = 'new'
buftype = 'nofile'
def __init__(self, ui, open_cmd):
self.buffer = None
self.ui = ui
self.open_cmd = open_cmd
self.is_open = False
def create(self):
""" create window """
vim.command('silent %s %s' % (self.open_cmd, self.name))
vim.command('setlocal buftype=%s modifiable winfixheight winfixwidth\
nobackup noswapfile' % (self.buftype))
self.buffer = vim.current.buffer
self.is_open = True
self.on_create()
def on_create(self):
""" callback """
def clean(self):
if self.buffer_empty():
return
self.buffer[:] = []
def write(self, msg, return_focus=True, after='normal G'):
self._return_focus(self.__curry(self._write, msg, after), return_focus)
def _write(self, msg, after='normal G'):
if not self.is_open:
self.create()
if self.buffer_empty():
self.buffer[:] = str(msg).split('\n')
else:
self.buffer.append(str(msg).split('\n'))
self.command(after)
def buffer_empty(self):
if len(self.buffer) == 1 \
and len(self.buffer[0]) == 0:
return True
else:
return False
def command(self, cmd):
""" go to my window & execute command """
winnr = self.getwinnr()
if winnr != int(vim.eval("winnr()")):
vim.command(str(winnr) + 'wincmd w')
vim.command(cmd)
def getwinnr(self):
return int(vim.eval("bufwinnr('"+self.name+"')"))
def set_line(self, lineno, return_focus=True):
self._return_focus(self.__curry(self._set_line, lineno), return_focus)
def _set_line(self, lineno):
self.focus()
vim.command("normal %sgg" % str(lineno))
def get_line(self):
return int(self._return_focus(self.__curry(self._get_line), True))
def _get_line(self):
self.focus()
return vim.current.range.start
def eval(self, cmd):
return self._return_focus(self.__curry(self._eval, cmd), True)
def _eval(self, cmd):
self.focus()
return vim.eval(cmd)
def focus(self):
vim.command(str(self.winnr()) + "wincmd w")
def winnr(self):
return int(vim.eval("bufwinnr('" + self.name + "')"))
def _return_focus(self, callback, flag=True):
if flag:
return self.__return_focus(callback)
else:
return callback()
def __return_focus(self, callback):
prev_win = vim.eval('winnr()')
result = callback()
vim.command('%swincmd W' % prev_win)
return result
def __curry(self, callback, *args):
return functools.partial(callback, *args)
class FriendsWindow(Window):
name = "Friends"
def on_create(self):
self.update()
vim.command('\
nnoremap <buffer> <cr> :python golimar.openSelectedFriend()\
<cr>')
vim.command('set filetype=golimarfriends')
def update(self):
self.clean()
for user in self.ui.skype.Friends:
self.write('(%s) %s' % (user.OnlineStatus, user.Handle))
self.set_line(0)
def selected(self):
return self.ui.skype.Friends[self.get_line()]
class ChatsWindow(Window):
name = "Chats"
def on_create(self):
self.update()
vim.command('nnoremap <buffer> <cr> :python golimar.openSelectedChat()<cr>')
def update(self):
self.clean()
for chat in self.ui.skype.RecentChats:
self.write(self._topic(chat) + self._unseen(chat))
self.set_line(0)
def _topic(self, chat):
if chat.Topic == '':
for member in chat.Members:
if member.Handle != self.ui.skype.CurrentUser.Handle:
return member.Handle
else:
return chat.Topic.encode('utf-8')
def _unseen(self, chat):
count = self.unseenCount(chat)
if count:
return ' [%i]' % (count)
return ''
def unseenCount(self, chat):
result = 0
for message in chat.RecentMessages:
if message.Status == 'RECEIVED':
result += 1
return result
def selected(self):
return self.ui.skype.RecentChats[self.get_line()]
class MessagesWindow(Window):
name = 'Skype'
def on_create(self):
self.chat = None
vim.command('set filetype=golimarchat')
def setChat(self, chat):
self.chat = chat
self.update()
def update(self):
self.clean()
if self.chat is None:
return
biggerName = 0
for message in self.chat.RecentMessages:
if len(message.FromHandle) > biggerName:
biggerName = len(message.FromHandle)
biggerName += 2
width = self.width()
pad = 21 + 1 + biggerName + 1 + 1
for message in self.chat.RecentMessages:
datetime = str(message.Datetime)
userFrom = '%s' % (message.FromHandle)
userFrom = userFrom.rjust(biggerName)
body = self.__body(message.Body.encode('utf-8'), width, pad)
self.write('[%s] %s: %s' % (datetime, userFrom, body))
def __body(self, body, width, pad):
lines = str(body).split('\n')
result = []
for line in lines:
result.extend(self.__split_str_into_len(str(line), width - pad - 4))
return ('\n' + (' ' * pad)).join(result)
def width(self):
return int(self.eval('winwidth(0)'))
def __split_str_into_len(self, s, l=2):
""" Split a string into chunks of length l """
return [s[i:i+l] for i in range(0, len(s), l)]
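    # A quick sanity check of the chunking above:
    #
    #     __split_str_into_len("abcdef", 4) -> ["abcd", "ef"]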
def markAsSeen(self):
if self.chat is None:
return
for message in self.chat.RecentMessages:
if message.Status == 'RECEIVED':
message.MarkAsSeen()
class ComposeWindow(Window):
name = 'Compose'
buftype = 'acwrite'
def message(self):
return '\n'.join(self.buffer)
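# A minimal driving sketch. Skype4Py is assumed to be the client library,
# based on the Friends/RecentChats/RegisterEventHandler API used above:
#
#     import Skype4Py
#     ui = Ui(Skype4Py.Skype())
#     ui.open()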
|
gpl-2.0
| -7,399,287,538,600,110,000 | 25.262195 | 84 | 0.565823 | false | 3.714532 | false | false | false |
DmitriySalnikov/godot
|
modules/mono/build_scripts/godot_net_sdk_build.py
|
11
|
1755
|
# Build Godot.NET.Sdk solution
import os
from SCons.Script import Dir
def build_godot_net_sdk(source, target, env):
# source and target elements are of type SCons.Node.FS.File, hence why we convert them to str
module_dir = env["module_dir"]
solution_path = os.path.join(module_dir, "editor/Godot.NET.Sdk/Godot.NET.Sdk.sln")
build_config = "Release"
from .solution_builder import build_solution
extra_msbuild_args = ["/p:GodotPlatform=" + env["platform"]]
build_solution(env, solution_path, build_config, extra_msbuild_args)
# No need to copy targets. The Godot.NET.Sdk csproj takes care of copying them.
def get_nupkgs_versions(props_file):
import xml.etree.ElementTree as ET
tree = ET.parse(props_file)
root = tree.getroot()
return {
"Godot.NET.Sdk": root.find("./PropertyGroup/PackageVersion_Godot_NET_Sdk").text.strip(),
"Godot.SourceGenerators": root.find("./PropertyGroup/PackageVersion_Godot_SourceGenerators").text.strip(),
}
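# The props file is assumed to look roughly like this (root element and
# version numbers are illustrative; the XPath lookups above only require the
# PropertyGroup children):
#
#     <Project>
#       <PropertyGroup>
#         <PackageVersion_Godot_NET_Sdk>4.0.0</PackageVersion_Godot_NET_Sdk>
#         <PackageVersion_Godot_SourceGenerators>4.0.0</PackageVersion_Godot_SourceGenerators>
#       </PropertyGroup>
#     </Project>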
def build(env_mono):
assert env_mono["tools"]
output_dir = Dir("#bin").abspath
editor_tools_dir = os.path.join(output_dir, "GodotSharp", "Tools")
nupkgs_dir = os.path.join(editor_tools_dir, "nupkgs")
module_dir = os.getcwd()
nupkgs_versions = get_nupkgs_versions(os.path.join(module_dir, "SdkPackageVersions.props"))
target_filenames = [
"Godot.NET.Sdk.%s.nupkg" % nupkgs_versions["Godot.NET.Sdk"],
"Godot.SourceGenerators.%s.nupkg" % nupkgs_versions["Godot.SourceGenerators"],
]
targets = [os.path.join(nupkgs_dir, filename) for filename in target_filenames]
cmd = env_mono.CommandNoCache(targets, [], build_godot_net_sdk, module_dir=module_dir)
env_mono.AlwaysBuild(cmd)
|
mit
| 5,912,477,984,069,791,000 | 30.909091 | 114 | 0.688889 | false | 3.095238 | false | false | false |
epam/DLab
|
infrastructure-provisioning/src/general/scripts/aws/edge_start.py
|
1
|
3048
|
#!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
from dlab.fab import *
from dlab.actions_lib import *
import sys
if __name__ == "__main__":
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
# generating variables dictionary
create_aws_config_files()
print('Generating infrastructure names and tags')
edge_conf = dict()
edge_conf['service_base_name'] = os.environ['conf_service_base_name']
edge_conf['instance_name'] = edge_conf['service_base_name'] + "-" + os.environ['edge_user_name'] + '-edge'
edge_conf['tag_name'] = edge_conf['service_base_name'] + '-Tag'
logging.info('[START EDGE]')
print('[START EDGE]')
try:
start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
except Exception as err:
append_result("Failed to start edge.", str(err))
sys.exit(1)
except:
sys.exit(1)
try:
instance_hostname = get_instance_hostname(edge_conf['tag_name'], edge_conf['instance_name'])
addresses = get_instance_ip_address(edge_conf['tag_name'], edge_conf['instance_name'])
ip_address = addresses.get('Private')
public_ip_address = addresses.get('Public')
print('[SUMMARY]')
logging.info('[SUMMARY]')
print("Instance name: {}".format(edge_conf['instance_name']))
print("Hostname: {}".format(instance_hostname))
print("Public IP: {}".format(public_ip_address))
print("Private IP: {}".format(ip_address))
with open("/root/result.json", 'w') as result:
res = {"instance_name": edge_conf['instance_name'],
"hostname": instance_hostname,
"public_ip": public_ip_address,
"ip": ip_address,
"Action": "Start up notebook server"}
print(json.dumps(res))
result.write(json.dumps(res))
except:
print("Failed writing results.")
sys.exit(0)
|
apache-2.0
| -5,064,682,080,044,262,000 | 39.64 | 110 | 0.580052 | false | 4.02642 | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.