# ==== aesthete/glypher/Renderers.py (package: Aesthete 0.4.2) ====
import glypher as g
import traceback
from aobject.utils import debug_print
import os
import re
import string
import pango
import pangocairo
import cairo
import Image
# NOTE: GlypherTeXRendering below also relies on a `tex` module (tex.Style,
# tex.tex) that is not imported in this file as distributed.
render_library = {}
class GlypherRendering :
shape = None
size = None
height = None
width = None
image = None
size = 20.0
def __init__(self, shape, code = None, setup = False, italic = False) :
self.shape = shape
if setup : self.setup(shape, code)
def setup(self, shape, code = None) :
self.image = cairo.SVGSurface(None, self.size/2, self.size)
cr = cairo.Context(self.image)
cr.set_source_rgb(1.0,1.0,1.0)
cr.rectangle(0, 0, self.size/2, self.size)
self.height = g.stp*self.size; self.width = g.stp*self.size/2;
cr.fill()
def draw(self, size, cr = None, l = 0, r = 0, colour = [0.0, 0.0, 0.0]) :
cr.save()
cr.set_source_rgb(1.0,1.0,1.0)
cr.rectangle(0, 0, size/2, size)
cr.fill()
cr.restore()
class GlypherPangoRendering (GlypherRendering):
font = "sans"
#font = { 'normal' : "sans", 'italic' : "sans italic", 'bold' : "sans bold", 'bold italic' : "sans italic bold" }
def get_font(self, font_name = None, italic = False, bold = False) :
return (self.font if font_name is None else font_name)+(" bold" if bold else "")+(" italic" if italic else "")
#return self.font[('bold italic' if bold else 'italic') if italic else ('bold' if bold else 'normal')]
def draw(self, size, cr = None, l = 0, r = 0, colour = [0.0, 0.0, 0.0], ink = False, italic = False, bold = False, font_name = None) :
if cr == None :
image = cairo.ImageSurface(cairo.FORMAT_ARGB32, int(size), int(size))
cr = cairo.Context(image)
cr.set_source_rgb(*colour)
pcr = pangocairo.CairoContext(cr)
layout = pcr.create_layout()
#layout.set_font_description(pango.FontDescription("LMMathItalic12 "+str(size/2)))
layout.set_font_description(pango.FontDescription(self.get_font(font_name, italic, bold)+' '+str(size/2)))
layout.set_text(self.shape)
met = layout.get_pixel_extents()
cr.save()
cr.translate(met[1][0],met[1][1])
if ink :
height = met[0][3]; width = met[0][2]
cr.translate(- (met[0][0]-met[1][0]),- (met[0][1]-met[1][1]))
else :
height = met[1][3]; width = met[1][2]
asc = layout.get_context().get_metrics(layout.get_font_description()).get_ascent()/pango.SCALE
desc = layout.get_context().get_metrics(layout.get_font_description()).get_descent()/pango.SCALE
#layout = pcr.create_layout()
#layout.set_font_description(pango.FontDescription("LMMathItalic12 "+str(size/2)))
#layout.set_font_description(pango.FontDescription(self.get_font(font_name, italic, bold)+' '+str(size/2)))
#layout.set_text(self.shape)
pcr.show_layout(layout)
cr.restore()
delta = (met[0][0]-met[1][0], met[0][0]+met[0][2]-met[1][0]-met[1][2],
met[0][1]-met[1][1], met[0][1]+met[0][3]-met[1][1]-met[1][3])
# FIXME: this is a bit of a botch to guess where the midline of an equals or minus lies
return (height, width, height*0.5 if ink else desc+0.5*(asc-desc), delta)
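# A minimal usage sketch (assumption: `cr` is an existing pycairo context and
# the requested font is available to Pango). draw() paints the glyph at the
# current origin and reports its metrics:
#
#   rendering = GlypherPangoRendering(u'x')
#   height, width, midline, delta = rendering.draw(20.0, cr, italic=True)
#   # `midline` is the estimated vertical centre of an '=' or '-' at this size;
#   # `delta` holds the ink-vs-logical extents offsets.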
class GlypherPangoCMURendering (GlypherPangoRendering):
font = "CMU Serif"
#font = { 'normal' : "CMU Serif", 'italic' : "CMU Serif italic", \
# 'bold' : "CMU Serif bold", \
# 'bold italic' : "CMU Serif italic bold" }
class GlypherPangoLLRendering (GlypherPangoRendering):
font = "Linux Libertine"
class GlypherPangoCMMapRendering (GlypherPangoRendering):
font = "cmmi10"
cm_size_coeff = 1.0
cm_vertical_offset = 0.0
def __init__(self, shape, code = None, setup = False) :
cmi = get_cm_index()
if shape in cmi :
self.font = cmi[shape][0]
self.cm_size_coeff = cmi[shape][2]
self.cm_vertical_offset = cmi[shape][3]
shape = cmi[shape][1]
GlypherPangoRendering.__init__(self, shape, code, setup)
def draw(self, size, cr = None, l = 0, r = 0, colour = [0.0, 0.0, 0.0]) :
if (cr) : cr.save(); cr.translate(0, -self.cm_vertical_offset)
# GlypherPangoRendering.draw returns (height, width, midline, delta); pass the
# whole tuple back rather than unpacking only two values.
metrics = GlypherPangoRendering.draw(self, size*self.cm_size_coeff, cr, l, r, colour)
if (cr) : cr.restore()
return metrics
class GlypherTeXRendering (GlypherRendering):
def setup(self, shape, size, code = None) :
self.shape = shape
self.size = size
plain = tex.Style("plain", "", "\\bye\n")
codestr = string.replace(shape, ' ', '_')
codestr = string.replace(codestr, '\\', '@')
codestr = string.replace(codestr, '$', '#')
fn = "/tmp/aes-glypher-TeX_" + codestr
if not os.path.exists(fn+'.png') :
(dvi, log) = tex.tex(plain, shape if code == None else code)
dvi.write(open(fn+'.dvi', 'wb'))
os.system('convert -crop 8x20+90+67 ' + fn + '.dvi ' + fn +'.png 2> /dev/null')
self.image = cairo.ImageSurface.create_from_png(fn+'.png')
self.height = self.image.get_height()
self.width = self.image.get_width()
#os.system('rm ' + fn + '*')
#render_library[(' ',10)] = GlypherPangoRendering(' ', 10, None, False)
#render_library[(' ',10)].image = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 20)
def find_rendering(shape, code = None, italic = False, bold = False) :
if g.use_rendering_library :
if not (shape, italic) in render_library :
render_library[(shape, italic)] = GlypherPangoLLRendering(shape, code, italic=italic, bold=bold)
return render_library[(shape, italic)]
else :
return GlypherPangoLLRendering(shape, code, italic=italic)
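# Usage sketch: when g.use_rendering_library is set, find_rendering() memoises
# renderings keyed on (shape, italic) -- note the bold flag is not part of the
# key -- so repeated lookups of a glyph reuse one GlypherPangoLLRendering.
#
#   r = find_rendering(u'\u03b1', italic=True)
#   r is find_rendering(u'\u03b1', italic=True)   # True while the cache is enabled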

# ==== aesthete/glypher/Widget.py (package: Aesthete 0.4.2) ====
import glypher as g
import rsvg
import StringIO
from types import *
from Toolbox import *
from aobject.utils import debug_print
import gtk
from aobject.paths import *
from aobject import aobject
try :
import sympy
import sympy.parsing.maxima
have_sympy = True
except ImportError :
have_sympy = False
from Interpret import *
from Caret import *
from Phrase import *
from Parser import *
import draw
debugging = False
phrasegroups_dir = get_user_location() + 'phrasegroups/'
display_cache = {}
class GlyphDisplay() :
main_phrase = None
line_height = 100
def __init__(self) :
self.main_phrase = GlypherMainPhrase(None,
self.line_height, self.line_height,
(0.0,0.0,0.0), is_decorated=False,
by_bbox=True)
self.main_phrase.set_is_caching(True)
def render(self, xml) :
str_xml = ET.tostring(xml.getroot())
if len(str_xml) in display_cache and \
str_xml in display_cache[len(str_xml)] :
return display_cache[len(str_xml)][str_xml]
# Do stuff
pg = parse_phrasegroup(self.main_phrase, xml, top=False)
self.main_phrase.empty()
self.main_phrase.adopt(pg)
self.main_phrase.draw()
ims = self.main_phrase.cairo_cache_image_surface
if len(str_xml) not in display_cache :
display_cache[len(str_xml)] = {}
display_cache[len(str_xml)][str_xml] = ims
return ims
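# Sketch of the cache behaviour above: display_cache is a two-level dict keyed
# first by the length of the serialised XML and then by the string itself, so
# identical expressions share one cached cairo image surface.
#
#   display = GlyphDisplay()
#   ims = display.render(xml_tree)    # xml_tree: an ElementTree for a phrasegroup
#   ims is display.render(xml_tree)   # second call returns the cached surface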
class GlyphCellRenderer(gtk.GenericCellRenderer) :
__gproperties__ = {
"obj": (gobject.TYPE_PYOBJECT, "obj", "Aesthete object", gobject.PARAM_READWRITE),
"width": (gobject.TYPE_INT, "width", "Width", 0, 1000, 0, gobject.PARAM_READWRITE),
"height": (gobject.TYPE_INT, "height", "Height", 0, 1000, 0, gobject.PARAM_READWRITE),
}
default_font_name = "Monospace"
line_height = 25
image_surface = None
def __init__(self) :
self.__gobject_init__()
self.obj = None
self.set_font_description("%s %f"%(self.default_font_name,
0.4*self.line_height))
def do_set_property(self, pspec, value) :
setattr(self, pspec.name, value)
def do_get_property(self, pspec) :
return getattr(self, pspec.name)
def set_font_description(self, fd) :
self.pango_font_description = pango.FontDescription(fd)
def _render_xml(self, cr, xml, rat) :
ci = display.render(xml)
if cr is not None :
cr.save()
cr.scale(rat, rat)
cr.translate(-10, -10)
cr.set_source_surface(ci, 0, 0)
cr.paint()
cr.restore()
def on_render(self, window, widget, background_area, cell_area, expose_area,
flags) :
cr = window.cairo_create()
cr.save()
cr.translate(cell_area.x, cell_area.y)
if self.obj is None :
return
xml = None
rat = self.line_height/float(display.line_height)
if isinstance(self.obj, aobject.AObject) and \
self.obj.get_aesthete_xml() is not None :
xml = self.obj.get_aesthete_xml()
elif isinstance(self.obj, ET._ElementTree) :
xml = self.obj
#rat = rat*float(xml.getroot().get('height'))
elif isinstance(self.obj, GlypherEntity) :
xml = ET.ElementTree(self.obj.get_xml())
else :
cp = widget.get_pango_context()
layout = pango.Layout(cp)
layout.set_font_description(self.pango_font_description)
layout.set_text(self.obj.get_aname_nice())
widget.style.paint_layout(window, gtk.STATE_NORMAL, True,
cell_area, widget, 'text',
cell_area.x, cell_area.y,
layout)
if xml is not None :
self._render_xml(cr, xml, rat)
cr.restore()
def on_get_size(self, widget, cell_area=None):
x = 0
y = 0
if cell_area is not None :
x = cell_area.x
y = cell_area.y
rat = self.line_height/float(display.line_height)
if self.obj is None :
w = self.line_height
h = self.line_height
elif isinstance(self.obj, aobject.AObject) and \
self.obj.get_aesthete_xml() is not None :
xml = self.obj.get_aesthete_xml()
w = int(round(self.line_height*float(xml.getroot().get('width'))))
h = int(round(self.line_height*float(xml.getroot().get('height'))))
elif isinstance(self.obj, ET._ElementTree) :
w = int(round(self.line_height*float(self.obj.getroot().get('width'))))
h = int(round(self.line_height*float(self.obj.getroot().get('height'))))
elif self.image_surface is not None :
w = int(rat*(self.image_surface.get_width()-20))
h = int(rat*(self.image_surface.get_height()-20))
elif isinstance(self.obj, GlypherEntity) :
w = int(self.obj.get_width())
h = int(self.obj.get_height())
else :
#FIXME: CACHE OR SOMETHING!!
cp = widget.get_pango_context()
layout = pango.Layout(cp)
layout.set_font_description(self.pango_font_description)
layout.set_text(self.obj.get_aname_nice())
text_extents = layout.get_pixel_extents()[0]
x = text_extents[0]
y = text_extents[1]
w = text_extents[2]-x
h = text_extents[3]-y
return (x, y, w, h)
#ims = self.main_phrase.cairo_cache_image_surface
gobject.type_register(GlyphCellRenderer)
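# Usage sketch (PyGTK 2.x, assumed wiring): GlyphCellRenderer is a
# GenericCellRenderer, so it is attached to a tree view column and fed through
# its "obj" property rather than a text attribute.
#
#   column = gtk.TreeViewColumn("Expression")
#   cell = GlyphCellRenderer()
#   column.pack_start(cell, True)
#   column.add_attribute(cell, "obj", 0)   # model column 0 holds the object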
class GlyphImage(gtk.DrawingArea) :
caret = None
margins = [0, 0, 0, 0]
default_width = 200
default_height = 100
line_height = 25
who_is_phrased_to = "None yet"
who_is_attached_to = "None"
__gsignals__ = { \
"expose-event" : "override", \
"content-changed" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
()),
"request-plot" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_STRING,)),
}
def do_content_changed(self, o = None) :
self.queue_draw()
def set_font_size(self, font_size) :
self.line_height = font_size
self._auto_resize()
def __init__(self, position=(0, 0)):
gtk.DrawingArea.__init__(self)
self.container = self
container = self
self.set_size_request(self.default_width, self.default_height)
self.set_property("can-focus", False)
self.position = position
self.connect('content-changed', self.do_content_changed)
#self.caret.font_size = 40
self.grab_focus()
def set_status(self, text) :
pass
def clear(self) :
self.xml = None
self.image_surface = None
def set_who_is_phrased_to(self, string) :
self.who_is_phrased_to = string
def set_who_is_attached_to(self, string) :
self.who_is_attached_to = string
show_phrase_order = False
def do_expose_event(self, event):
cr = self.window.cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.draw(cr, *self.window.get_size())
image_surface = None
def draw(self, cr, swidth, sheight):
cr.save()
#cr.set_source_rgb(1.0, 1.0, 1.0)
#cr.rectangle(0, 0, swidth, sheight); cr.fill()
cr.save()
cr.translate(*self.position)
if self.image_surface is not None :
ci = self.image_surface
if cr is not None :
rat = self.line_height/float(display.line_height)
cr.scale(rat, rat)
cr.translate(-10, -10)
cr.set_source_surface(ci, 0, 0)
cr.paint()
cr.restore()
cr.restore()
#def _resize_to_allocation(self, allocation=None) :
# if allocation is not None :
# self.default_height = allocation.height
# self.default_width = allocation.width
def _auto_resize(self) :
if self.image_surface is not None :
m = self.margins
rat = self.line_height/float(display.line_height)
self.set_size_request(int(round(rat*(self.image_surface.get_width()-20)))+m[0]+m[2],
int(round(rat*(self.image_surface.get_height()-20)))+m[1]+m[3])
def set_xml(self, xml, insert=False, top=False) :
self.xml = xml
self.image_surface = display.render(xml)
self._auto_resize()
def get_xml(self, full = False) :
return self.xml
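# Usage sketch: GlyphImage is a non-interactive view; hand it expression XML
# and it renders through the module-level `display` and resizes itself to fit.
#
#   image = GlyphImage()
#   image.set_font_size(30)
#   image.set_xml(xml_tree)   # e.g. XML produced by GlyphEntry.get_xml()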
class GlyphEntry(gtk.Layout) :
caret = None
main_phrase = None
margins = [5, 5, 5, 5]
default_width = 200
default_height = 100
line_height = 45
who_is_phrased_to = "None yet"
who_is_attached_to = "None"
draw_corner_art = False
corner_art_width = 200.
suspend_corner_art = False
__gsignals__ = { \
"expose-event" : "override", \
"key-press-event" : "override",\
"key-release-event" : "override",\
"button-press-event" : "override",\
"button-release-event" : "override",\
"scroll-event" : "override",\
"status-update" : (gobject.SIGNAL_RUN_LAST,gobject.TYPE_NONE,
(gobject.TYPE_STRING,)),
"content-changed" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
()),
"size-allocate" : "override",\
"processed-line" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
()),
"request-plot" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_STRING,)),
"caret-moved" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
()),
}
def do_size_allocate(self, allocation) :
gtk.Layout.do_size_allocate(self, allocation)
if not self.fixed_main_phrase :
self.main_phrase.set_area((0, allocation.height-self.position[1]))
self.suspend_corner_art = allocation.height < 200
self._resize_to_allocation(allocation)
def do_content_changed(self, o = None) :
self.queue_draw()
def get_font_size(self) :
return self.line_height
def set_font_size(self, font_size) :
self.line_height = font_size
self.main_phrase.set_font_size(font_size)
self.main_phrase.child_change()
self._auto_resize()
self.queue_draw()
def __init__(self, position=(5, 5), interactive=True,
resize_to_main_phrase=False, evaluable=False,
fixed_main_phrase=False, dec_with_focus=True,
margins=None, corner_art=False):
gtk.Layout.__init__(self)
self.container = self
container = self
self.interactive = interactive
self.fixed_main_phrase = fixed_main_phrase
self.resize_to_main_phrase = resize_to_main_phrase
self.evaluable = evaluable
self.dec_with_focus = dec_with_focus
if margins is not None :
self.margins = margins
if corner_art :
svg = rsvg.Handle(get_share_location()+'images/glypher_corner.svg')
if svg is not None :
ci = cairo.ImageSurface(cairo.FORMAT_ARGB32,
svg.props.width,
svg.props.height)
cr = cairo.Context(ci)
cr.scale(float(self.corner_art_width)/ci.get_width(),
float(self.corner_art_width)/ci.get_width())
svg.render_cairo(cr)
self.corner_art_image_surface = ci
self.draw_corner_art = True
self.clipboard = gtk.Clipboard()
self.set_size_request(self.default_width, self.default_height)
self.set_property("can-focus", interactive)
self.add_events(gtk.gdk.KEY_PRESS_MASK)
self.add_events(gtk.gdk.KEY_RELEASE_MASK)
self.add_events(gtk.gdk.BUTTON_PRESS_MASK)
self.add_events(gtk.gdk.BUTTON_RELEASE_MASK)
self.add_events(gtk.gdk.POINTER_MOTION_MASK)
self.add_events(gtk.gdk.SCROLL_MASK)
self.connect("focus-in-event", lambda w, e : self.queue_draw())
self.connect("focus-out-event", lambda w, e : self.queue_draw())
ps = self.process_main_phrase_signal
self.position = position
self.main_phrase = GlypherMainPhrase(ps,
self.line_height, self.line_height,
(0.0,0.0,0.0), is_decorated=True,
by_bbox=True)
self.caret = GlypherCaret(self.main_phrase, interactive=interactive,
container=container, glypher=self)
global caret
caret = self.caret
caret.connect('content-changed', lambda o : self.emit('content-changed'))
self.connect('content-changed', self.do_content_changed)
#self.caret.font_size = 40
self.main_phrase.line_length = self.get_allocation().width
self.grab_focus()
self.caret.new_phrase(self.main_phrase)
#self.caret.enter_phrase(self.main_phrase, at_start = True)
#self.caret.new_word()
#self.main_phrase.set_shows_active(False)
#self.response_phrase.set_shows_active(False)
self.caret.connect("changed-phrased-to", lambda o, s, l : self.set_who_is_phrased_to(l))
self.caret.connect("changed-attached-to", lambda o, s, l : self.set_who_is_attached_to(l))
self.reset_main_phrase()
self.main_phrases = [self.main_phrase]
self.main_phrases_offsets = {self.main_phrase : self.position}
self._auto_resize()
def process_line(self) :
self.caret.remove_boxes()
try :
input_line, response = self.response_processor()
except GlypherTargetPhraseError as e :
debug_print("Error : " + str(e))
l = GlypherLabelBox(str(e),\
widget_parent=self.container, attached_to=e.tp,
caret=self.caret,
box_colour=(0.9, 0.3, 0.3))
self.caret.boxes.append(l)
debug_print("Error : " + str(e))
return None, None
self.emit("processed-line")
return input_line, response
def set_status(self, text) :
pass
def do_info(self, entity) :
title = entity.get_title()
if title is None :
title = "<i>%s</i>" % entity.mes[-1]
info_text = entity.get_info_text()
if info_text is None :
info_text = "<i>No info text</i>"
wiki_link = entity.get_wiki_link()
dialog = gtk.Dialog('Entity info', None,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
vbox = dialog.get_content_area()
info_head_labl = gtk.Label()
info_head_labl.set_markup('<b>%s</b>' % title)
vbox.pack_start(info_head_labl, False)
info_labl = gtk.Label()
info_labl.set_line_wrap(True)
info_labl.set_size_request(400, -1)
info_text = re.sub("\\n", " ", info_text)
info_labl.set_markup(info_text.strip())
vbox.pack_start(info_labl)
if wiki_link is not None :
vbox.pack_start(gtk.HSeparator())
wiki_gtk_link = \
"http://en.wikipedia.org/wiki/Special:Search?search=%s" % \
wiki_link
wiki_gtk_link = gtk.LinkButton(wiki_gtk_link, wiki_link)
wiki_hbox = gtk.HBox()
wiki_hbox.pack_start(gtk.Label("Wiki:"), False)
wiki_hbox.pack_start(wiki_gtk_link)
vbox.pack_start(wiki_hbox, False)
vbox.show_all()
dialog.get_action_area().get_children()[-1].grab_focus()
dialog.run()
dialog.destroy()
def do_bracket_warning(self) :
dialog = gtk.Dialog('Open parentheses..?', None,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
vbox = dialog.get_content_area()
info_head_labl = gtk.Label()
info_head_labl.set_markup('<b>Glypher Tip</b>')
vbox.pack_start(info_head_labl, False)
info_labl = gtk.Label()
info_labl.set_line_wrap(True)
info_labl.set_size_request(600, -1)
info_labl.set_markup(\
"""
Don't forget that, because a Glypher expression is really a tree of
subexpressions in disguise, ordinary parentheses aren't used for the moment.
To do something to an expression, hit <i>Left</i> or <i>Right</i> until the
expression on which you wish to operate is enclosed in blue Caret brackets.
Then go for it - hit ^, +, *, or whatever takes your fancy! If <i>Aesthete</i>
reckons they're needed for clarity, it'll show brackets, but they're always
there implicitly.
If you are looking for a matrix, try <i>Alt+(</i>. Using <i>Super+Left/Down</i>
gives you extra rows and columns. You may also be looking for <i>Ctrl+(</i>,
which turns a Word into a function.
""")
vbox.pack_start(info_labl)
vbox.show_all()
dialog.run()
dialog.destroy()
def clear(self) :
self.reset_main_phrase()
def reset_main_phrase(self, space_array=None) :
self.main_phrase.empty()
self.caret.attached_to = None
self.caret.enter_phrase(self.main_phrase)
def set_who_is_phrased_to(self, string) :
self.who_is_phrased_to = string
self.emit('caret-moved')
def set_who_is_attached_to(self, string) :
self.who_is_attached_to = string
self.emit('caret-moved')
show_phrase_order = False
#def do_expose_event(self, event):
# cr = self.window.cairo_create()
# cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
# cr.clip()
# self.draw(cr, *self.window.get_size())
# self.show_all()
def do_expose_event(self, event):
gtk.Layout.do_expose_event(self, event)
cr = self.get_bin_window().cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.draw(cr, *self.get_bin_window().get_size())
def draw(self, cr, swidth, sheight, force_dec=None):
cr.save()
draw_corner_art = self.draw_corner_art and not self.suspend_corner_art
bg_colour = (1.,1.,1.)
if draw_corner_art :
bg_colour = (0.95, 0.95, 1.0)
cr.set_source_rgb(*bg_colour)
cr.rectangle(0, 0, swidth, sheight)
cr.fill()
if self.draw_corner_art :
ci = self.corner_art_image_surface
if ci is not None :
cr.save()
cr.translate(self.allocation.width-self.corner_art_width, 0)
cr.set_source_surface(ci)
cr.paint()
cr.restore()
#cr.move_to(0, 20)
#cr.set_font_size(14); cr.set_source_rgb(0.0, 0.0, 0.0)
#cr.show_text('GlyphMaker')
if debugging :
cr.move_to(140, 20)
cr.show_text("Phrased to "+self.who_is_phrased_to)
cr.move_to(140, 40)
cr.show_text("Attached to "+self.who_is_attached_to)
cr.move_to(10, 28); cr.rel_line_to(100, 0)
cr.stroke()
cr.save()
cr.translate(*self.position)
if draw_corner_art :
bb = list(self.main_phrase.config[0].bbox)
bb[0] -= 10
bb[1] -= 10
bb[2] = self.allocation.width - 30 - bb[0] - self.position[0]
bb[3] = bb[3]-bb[1]
box_colour = (0.9, 0.9, 1.0)
draw.draw_box(cr, box_colour, bb)
if (force_dec is None and self.dec_with_focus and (self.has_focus() != \
self.main_phrase.show_decoration())) or\
(force_dec is not None and force_dec != self.main_phrase.show_decoration()) :
self.main_phrase.set_is_decorated(self.has_focus())
self.main_phrase.draw(cr)
if (force_dec is None and self.main_phrase.show_decoration()) or\
force_dec :
self.caret.draw(cr)
if self.caret.symbol_active :
self.caret.attached_to.draw_alternatives(cr)
cr.restore()
if self.show_phrase_order : self.draw_phrase_order(cr)
cr.restore()
def do_button_press_event(self, event) :
if not self.interactive :
return False
for m in self.main_phrases :
x, y = self._local_coords_for_main_phrase(m,(event.x, event.y))
if fc(m.find_distance((x, y)), 0) :
target = m.find_nearest(point=(x,y), fall_through=True, enterable_parent=False)
if not target[1] or not target[1].process_button_press(event) :
debug_print((event.x,event.y,event.button))
if (event.button == 1) :
debug_print(m.config[0].bbox)
debug_print(m.config[0].basebox)
target = m.find_nearest\
(point=(x,y), fall_through=True, enterable_parent=True)
debug_print(target)
if target[1] : debug_print(target[1].format_me())
self.caret.change_attached(target[1])
self.queue_draw()
def do_button_release_event(self, event) :
self.grab_focus()
for m in self.main_phrases :
x, y = self._local_coords_for_main_phrase(m,(event.x, event.y))
if fc(m.find_distance((x, y)), 0) :
target = m.find_nearest(point=(x,y), fall_through=True, enterable_parent=False)
self.queue_draw()
def do_scroll_event(self, event) :
debug_print('h')
for m in self.main_phrases :
x, y = self._local_coords_for_main_phrase(m,(event.x, event.y))
if fc(m.find_distance((x, y)), 0) :
target = m.find_nearest(point=(x,y), fall_through=True, enterable_parent=False)
if not target[1] or not target[1].process_scroll(event) :
_scaling = 1.2 if event.direction == gtk.gdk.SCROLL_UP else 1/1.2
m.set_size_scaling(_scaling*m.get_ip('font_size_coeff'))
self.queue_draw()
def do_key_release_event(self, event):
keyname = gtk.gdk.keyval_name(event.keyval)
self.caret.process_key_release(keyname, event)
self.queue_draw()
def do_key_press_event(self, event):
if not self.interactive :
return
keyname = gtk.gdk.keyval_name(event.keyval)
m_control = bool(event.state & gtk.gdk.CONTROL_MASK)
m_shift = bool(event.state & gtk.gdk.SHIFT_MASK)
m_alt = bool(event.state & gtk.gdk.MOD1_MASK)
m_super = bool(event.state & gtk.gdk.SUPER_MASK)
debug_print(keyname)
g.dit = False
if m_super and m_control :
self.caret.process_key_press(keyname, event)
self.queue_draw()
elif (keyname == 'k' and m_control and not m_shift and not m_alt) or (keyname == 'K' and m_control and not m_shift and not m_alt) :
self.caret.delete_from_shape()
elif (keyname == 'u' and m_control and not m_shift and not m_alt) or (keyname == 'U' and m_control and not m_shift and not m_alt) :
self.caret.delete_to_shape()
elif (keyname == 'percent' and m_control) :
ref = self.caret.insert_named('response_reference')
ref.IN().adopt(make_word('r'+str(len(self.responses)), ref))
self.caret.change_attached(ref, outside=True)
elif keyname == 'BackSpace' and m_control and not m_alt :
self.reset_main_phrase()
elif m_control and not m_alt and not m_super and keyname == 'x' :
self.copy(cut=True)
elif m_control and not m_alt and not m_super and keyname == 'c' :
self.copy(cut=False)
elif m_control and not m_alt and not m_super and keyname == 'v' :
self.paste_text(xml=True)
elif m_control and not m_alt and not m_super and keyname == 'V' :
self.paste_text()
elif m_control and not m_alt and not m_super and keyname == 'Y' :
self.paste_text(alternative=True)
elif m_control and not m_alt and m_super and keyname == 'y' :
self.paste_text(verbatim=True)
elif self.evaluable and not m_control and not m_alt and not m_super and keyname == 'Return' :
debug_print(self.process_line())
elif not m_control and not m_alt and keyname=='F12' :
g.show_rectangles = not g.show_rectangles
self.queue_draw()
else :
ret = self.caret.process_key_press(keyname, event)
self.queue_draw()
return ret
self.queue_draw()
self.emit("content-changed") #FIXME: THIS SHOULD CHECK THAT A CHANGE IN FACT OCCURS
return False
def response_processor(self) :
return (self.main_phrase, interpret_sympy(None, self.main_phrase.get_sympy()))
def draw_phrase_order(self, cr) :
if not self.caret.attached_to or not self.caret.attached_to.am("phrase"): return
ents = self.caret.attached_to.sort_entities()
cr.save()
cr.translate(40, 200)
across_dist = 0
down_dist = 0
bbox_0 = None
cr.set_source_rgb(0.5,0.5,0.5)
for ent in ents :
cr.save()
cr.translate(-ent.config[0].bbox[0] + across_dist, -ent.config[0].bbox[1] + down_dist)
ent.draw(cr)
cr.rectangle(ent.config[0].bbox[0], ent.config[0].bbox[1], ent.config[0].bbox[2]-ent.config[0].bbox[0], ent.config[0].bbox[3]-ent.config[0].bbox[1])
cr.stroke()
cr.restore()
if bbox_0 is None or ent.config[0].bbox[0] != bbox_0 :
down_dist = 0
across_dist += ent.config[0].bbox[2]-ent.config[0].bbox[0]+5
else :
down_dist += ent.config[0].bbox[3]-ent.config[0].bbox[1]+5
bbox_0 = ent.config[0].bbox[0]
cr.restore()
def process_main_phrase_signal(self, main_phrase, signal, data = None) :
if signal == 'copy' :
self.copy(data[1], contents=data[2])
if data[0] : self.paste_text(alternative=False)
return True
if signal == 'recalc' :
self._auto_resize()
self.queue_draw()
return False
def _resize_to_allocation(self, allocation=None) :
pass
def _auto_resize(self) :
if self.resize_to_main_phrase :
m = self.margins
self.set_size_request(int(round(self.main_phrase.get_width()))+m[0]+m[2],
int(round(self.main_phrase.get_height()))+m[1]+m[3])
def copy(self, contents = False, cut = False, fmt='xml') :
#e = entity.copy()
#self.clipboard.set_text(e.get_repr() if not contents else " ".join([f.get_repr() for f in e.get_entities()]))
try :
copied = self.caret.copy(cut=cut, fmt=fmt)
except RuntimeError as reason :
self.set_status('Could not copy %s: %s' % (fmt, reason))
return
if fmt == 'xml' :
string = StringIO.StringIO()
copied.write(string, encoding="utf-8")
string = string.getvalue()
else :
string = copied
if len(string) < 100 :
self.set_status('Copied %s : %s' % (fmt, string.replace('\n', '; ')))
else :
self.set_status('Copied %s' % fmt)
self.clipboard.set_text(string)
def paste_text(self, verbatim = False, alternative = False, xml = False) :
debug_print(xml)
text = self.clipboard.request_text(self.do_request_clipboard,
(verbatim, alternative, xml))
def do_request_clipboard(self, clipboard, text, paste_text_args) :
self.caret.paste_text(text, *paste_text_args)
def set_xml(self, xml, insert=False, top=False) :
pg = parse_phrasegroup(self.main_phrase, xml, top=top)
if not insert :
if pg.am('space_array') :
self.reset_main_phrase(space_array=pg)
else :
self.reset_main_phrase()
self.main_phrase.adopt(pg)
#self.caret.insert_entity(pg)
def get_sympy(self) :
return self.main_phrase.get_sympy()
def get_text(self) :
return self.main_phrase.to_string()
def get_xml(self, full = False) :
root = self.main_phrase.get_xml(targets={}, top=False,
full=full)
debug_print(ET.tostring(root))
xml = ET.ElementTree(root)
return xml
def clear(self) :
self.reset_main_phrase()
def _local_coords_for_main_phrase(self, m, point) :
x = point[0] - self.main_phrases_offsets[m][0]
y = point[1] - self.main_phrases_offsets[m][1]
return x, y
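# Usage sketch (PyGTK, assumed surrounding window): GlyphEntry is the
# interactive editor widget; listen for "content-changed" and read the result
# back as text, XML or sympy.
#
#   entry = GlyphEntry(evaluable=True, resize_to_main_phrase=True)
#   entry.connect("content-changed", lambda w : debug_print(w.get_text()))
#   window.add(entry)   # `window`: an existing gtk.Window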
class GlyphBasicGlypher(GlyphEntry, aobject.AObject) :
container = None
caret = None
main_phrase = None
margins = [10, 40, 0, 10]
default_width = -1
default_height = 160
def do_content_changed(self, o = None) :
self.queue_draw()
def __init__(self, name_root="GlypherBasicGlypher", position = (10, 40),
env = None, evaluable = True):
GlyphEntry.__init__(self, position=position, interactive=True,
evaluable=evaluable, fixed_main_phrase=True,
dec_with_focus=False)
aobject.AObject.__init__(self, name_root, env, view_object=True)
self.main_phrase.set_by_bbox(False)
self.main_phrase.set_enterable(False)
self.main_phrase.set_attachable(False)
self.reset_main_phrase()
self.caret.enter_phrase(self.main_phrase)
self.main_phrases = [self.main_phrase]
self.main_phrases_offsets = {self.main_phrase : self.position}
def __del__(self) :
aobject.AObject.__del__(self)
#PROPERTIES
def get_auto_aesthete_properties(self) :
return { }
#BEGIN PROPERTIES FUNCTIONS
#END PROPERTIES FUNCTIONS
_move_from = None
_mp_from = None
def do_button_press_event(self, event) :
nearest = (None, 0)
self.grab_focus()
for m in self.main_phrases :
x, y = self._local_coords_for_main_phrase(m,(event.x, event.y))
d = m.find_distance((x, y))
if fc(d, 0) :
target = m.find_nearest(point=(x,y), fall_through=True, enterable_parent=False)
bp = target[1].process_button_press(event)
if bp is None : return False
debug_print(bp)
if not target[1] or not bp :
debug_print((x,y,event.button))
if (event.button == 1) :
self.caret.go_near((x, y), change=True)
self.queue_draw()
if (event.button == 2) :
self._move_from = (event.x,event.y,m)
self._mp_from = m.get_anchor_point()
return True
elif nearest[0] is None or d < nearest[1] :
nearest = (m, d)
if nearest[0] is not None and event.button == 1 :
self.caret.go_near((x, y), change=True)
self.queue_draw()
return True
def do_motion_notify_event(self, event) :
if self._move_from is not None :
m = self._move_from[2]
delta = (event.x-self._move_from[0], event.y-self._move_from[1])
m.move(delta[0] + self._mp_from[0], delta[1] + self._mp_from[1])
self.queue_draw()
def do_button_release_event(self, event) :
for m in self.main_phrases :
x, y = self._local_coords_for_main_phrase(m,(event.x, event.y))
if fc(m.find_distance((x, y)), 0) :
target = m.find_nearest(point=(x,y), fall_through=True, enterable_parent=False)
bp = target[1].process_button_release(event)
debug_print(bp)
if bp is None : return False
if not target[1] or not bp :
if (event.button == 2) :
self._move_from = None
self._mp_from = None
self.queue_draw()
return True
def do_scroll_event(self, event) :
for m in self.main_phrases :
x, y = self._local_coords_for_main_phrase(m,(event.x, event.y))
if fc(m.find_distance((x, y)), 0) :
target = m.find_nearest(point=(x,y), fall_through=True, enterable_parent=False)
if not target[1] or not target[1].process_scroll(event) :
_scaling = 1.2 if event.direction == gtk.gdk.SCROLL_UP else 1/1.2
m.set_size_scaling(_scaling*m.get_ip('font_size_coeff'))
self.queue_draw()
return True
class GlyphResponder(GlyphEntry) :
input_phrase = None
response_phrase = None
input_interactive = True
evalf = False
def __init__(self, position = (5, 5), interactive = True,
resize_to_main_phrase = False, evaluable = False, evalf = False):
ps = self.process_main_phrase_signal
self.evalf = evalf
self.response_phrase = GlypherMainPhrase(ps,
self.line_height,
self.line_height,
(0.0,0.0,0.0),
is_decorated=False,
by_bbox=True)
self.response_phrase.is_caching = True
GlyphEntry.__init__(self, position=position, interactive=False,
resize_to_main_phrase=resize_to_main_phrase,
evaluable=evaluable)
self.input_phrase = self.main_phrase
self.input_interactive = interactive
self.reset_main_phrase()
# space_array is accepted (and ignored here) because set_xml() below passes it
# through when the parsed phrasegroup is a space_array.
def reset_main_phrase(self, space_array=None) :
self.caret.attached_to = None
if self.input_phrase :
self.input_phrase.empty()
self.caret.enter_phrase(self.input_phrase)
self.response_phrase.empty()
def process_main_phrase_signal(self, main_phrase, signal, data = None) :
ret = GlyphEntry.process_main_phrase_signal(self, main_phrase, signal, data)
self.response_phrase.background_colour = (1.0, 1.0, 1.0, 0.0)
if main_phrase == self.input_phrase and signal == 'recalc' :
if len(main_phrase) > 0 :
debug_print(main_phrase.format_entities())
self.process_line()
if not main_phrase.entities[0].am('word') :
self.response_phrase.background_colour = (1.0, 1.0, 0.0, 0.4)
else :
self.response_phrase.empty()
debug_print(self.get_text())
return True
return ret
def response_processor(self) :
sym = self.input_phrase.get_sympy()
if self.evalf and isinstance(sym, sympy.core.basic.Basic) :
sym = sym.evalf()
return (self.input_phrase, interpret_sympy(self.response_phrase, sym))
def process_line(self) :
input_line, response = GlyphEntry.process_line(self)
self.response_phrase.empty()
caret = GlypherCaret(self.response_phrase, interactive=True,
container=self, glypher=self)
caret.enter_phrase(self.response_phrase)
if isinstance(response, str) :
caret.insert_entity(make_word(response, self.response_phrase))
else :
caret.insert_entity(response)
return input_line, response
def get_xml(self, input_phrase = False, full = False) :
mp = self.input_phrase if input_phrase else self.response_phrase
debug_print(ET.tostring(mp.get_xml(targets={},
top=False,
full=full)))
xml = ET.ElementTree(mp.get_xml(targets={}, top=False, full=full))
debug_print(ET.tostring(xml.getroot()))
return xml
def set_xml(self, xml) :
pg = parse_phrasegroup(self.input_phrase, xml, top=False)
if pg.am('space_array') :
self.reset_main_phrase(space_array=pg)
else :
self.reset_main_phrase()
#debug_print(pg.to_string())
#self.caret.insert_entity(pg)
#self.input_phrase.adopt(pg, go_inside=False)
#self.queue_draw()
def swap(self) :
if self.main_phrase == self.input_phrase :
self.main_phrase = self.response_phrase
self.interactive = False
else :
self.main_phrase = self.input_phrase
self.interactive = self.input_interactive
self._auto_resize()
self.queue_draw()
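# Usage sketch: GlyphResponder pairs an input phrase with a response phrase and
# re-evaluates on each edit; swap() flips which of the two is displayed.
#
#   responder = GlyphResponder(evaluable=True, evalf=True)
#   responder.set_xml(xml_tree)   # seed the input phrase
#   responder.swap()              # show the evaluated response instead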
display = GlyphDisplay()

# ==== aesthete/glypher/Fraction.py (package: Aesthete 0.4.2) ====
import glypher as g
from Word import *
from PhraseGroup import *
from Symbol import *
from Spacer import *
from Function import *
import BinaryExpression
import sympy
import Dynamic
from sympy.core import sympify
class GlypherFraction(GlypherPhraseGroup) :
bodmas_level = 0
row0 = None
vin = None
def recalc_basebox(self) :
GlypherPhraseGroup.recalc_basebox(self)
if self.vin :
l = self.config[0].get_basebox()
m = self.vin.config[0].get_basebox()
self.config[0].basebox = (l[0], l[1], l[2], l[3], m[4], l[5])
def get_sympy(self) :
return Dynamic.Mul(self.get_target('numerator').get_sympy(),\
Dynamic.Pow(self.get_target('denominator').get_sympy(), -1))
def to_latex(self) :
return '\\frac{%s}{%s}' % (self['numerator'].to_latex(),
self['denominator'].to_latex())
def to_string(self, mode = "string") :
if not 'numerator' in self.target_phrases or not 'denominator' in self.target_phrases : return '/'
return self.get_target('numerator').to_string(mode) + '/' + self.get_target('denominator').to_string(mode)
def __init__(self, parent, area = (0,0,0,0), numerator = None, denominator = None) :
GlypherPhraseGroup.__init__(self, parent, [], area, 'row0')
# Make sure that appending doesn't bring the left edge forward
#old_left = self.old_bbox[0]
#adj = self.get_adj(ind)
#if pd['n'] == 'col1' : adj = 10
#debug_print(adj)
#glyph.translate(adj, 0)
#if self.bbox[0] > old_left : self.bbox[0] = old_left
self.mes.append('fraction')
de_cell = GlypherPhrase(self, align=('c','m'))
self.append(de_cell, row=1); self.add_phrase(de_cell, 'row1')
de_cell.set_deletable(2)
de_cell.set_enterable(False)
self.add_target(de_cell, 'denominator')
self.set_rhs_target('denominator')
de_cell.set_font_size_scaling(0.6)
#de_cell.set_line_size_scaling(0.6)
self.set_row_align(1, 'c')
nu_cell = GlypherPhrase(self, align=('c','m'))
self.append(nu_cell, row=-1); self.add_phrase(nu_cell, 'row-1')
nu_cell.set_deletable(2)
nu_cell.set_enterable(False)
#nu_cell.set_attachable(False)
self.add_target(nu_cell, 'numerator')
self.set_lhs_target('numerator')
nu_cell.set_font_size_scaling(0.6)
#nu_cell.set_line_size_scaling(0.6)
self.set_row_align(-1, 'c')
vinculum = GlypherHorizontalLine(None, length=10, thickness=0.04)
vinculum.align=('c','m')
vinculum.set_tied_to(self)
## I think LaTeX just takes the max, but this looks quite nice
## until full compatibility is implemented
#lc = lambda : \
# 0.5*(nu_cell.width()+de_cell.width()) \
# if de_cell.width() < nu_cell.width() else \
# de_cell.width()
#vinculum.length_calc = lc
#vinculum.set_padding(1, 4)
#vinculum.set_padding(3, 4)
vi_cell = GlypherPhrase(self, align=('c','m')); self.row0 = vi_cell
self.append(vi_cell, row=0); self.add_phrase(vi_cell, 'row0')
vi_cell.set_deletable(2)
vi_cell.set_enterable(False)
vi_cell.set_attachable(False)
vi_cell.adopt(vinculum)
vi_cell.set_horizontal_ignore(True)
vi_cell.set_always_recalc(True)
self.set_lead(vi_cell, GLYPHER_PG_LEAD_VERT)
self.vin = vinculum
self.set_recommending(self["numerator"])
if numerator is not None :
nu_cell.adopt(numerator)
self.set_recommending(self["denominator"])
if denominator is not None :
de_cell.adopt(denominator)
def delete(self, sender = None, if_empty = True) :
if len(self.get_target('numerator').entities) \
+ len(self.get_target('denominator').entities) == 0 \
or not if_empty :
GlypherPhraseGroup.delete(self, if_empty=False)
_orphaning = None
def make_simplifications(self) :
if not 'numerator' in self.target_phrases : return
num_e = self.get_target('numerator').get_entities()
if self.included() and len(num_e) == 1 and num_e[0].am('negative') and len(num_e[0].get_entities())>0:
p = self.get_parent()
self._orphaning = p
q = num_e[0]
q.orphan()
q["expression"].IN().elevate_entities(self.get_target('numerator'))
self.set_recommending(self['numerator'])
self.orphan()
n = BinaryExpression.GlypherNegative(p)
n.get_target('expression').adopt(self)
p.adopt(n)
self._orphaning = None
def child_change(self) :
GlypherPhraseGroup.child_change(self)
self.make_simplifications()
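# Sketch of the sympy form produced by get_sympy() above: a fraction a/b is
# expressed as Mul(a, Pow(b, -1)), which is also how sympy itself represents
# division, e.g.
#
#   sympy.Mul(sympy.Symbol('a'), sympy.Pow(sympy.Symbol('b'), -1))   # == a/b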
a_half = sympify("1/2")
class GlypherSqrt(GlypherCompoundPhrase) :
bodmas_level = 0
degree = None
def get_sympy(self) :
return Dynamic.Pow(self.get_target('expression').get_sympy(),
a_half if not self.degree else \
self.degree.get_sympy())
def to_string(self, mode = "string") :
return unicode('sqrt(')+self.IN().to_string(mode)+unicode(')')
_altering = False
ex_cell = None
sqrt = None
def child_altered(self, child = None) :
GlypherCompoundPhrase.child_altered(self, child)
if self.ex_cell and self.sqrt and not self._altering :
b = self.sqrt
s = self.ex_cell
sc = (s.config[0].bbox[3]-s.config[0].bbox[1])
bc = (b.config[0].bbox[3]-b.config[0].bbox[1])
if not fc(sc, bc) :
if b.config[0].get_changes() != "" :
raise(RuntimeError('Rescaling sqrt for an un-reset sqrt bounding box'))
self._altering = True
b.set_font_size_scaling((sc/bc)*b.get_size_scaling())
self._altering = False
#FIXME: Note that degree is fixed (as this is a CompoundPhrase)
def __init__(self, parent, area = (0,0,0,0), expression = None,
degree = None) :
GlypherCompoundPhrase.__init__(self, parent, [], area)
#FIXME: Misnomer!
self.mes.append('square_root')
if degree is not None :
degree_pos = GlypherPhrase(self)
degree_pos.set_size_scaling(0.5)
degree_pos.set_enterable(False)
degree_pos.set_attachable(False)
right_mid = GlypherSpace(self, dims=(0.1,0.1))#GlypherVerticalSpacer(self, tied_to=expr, scaling=1.4)
right_mid.set_attachable(False)
degree_side = GlypherPhrase(self)
degree_side.append(right_mid, row=0)
degree_side.append(degree_pos, row=-1)
degree_side.set_enterable(False)
degree_side.set_attachable(False)
degree_pos.append(degree)
self.append(degree_side)
self.degree = degree_pos
sq_cell = GlypherPhrase(self, align=('c','m'))
self.append(sq_cell)
sq_cell.set_enterable(False)
sqrt_sym = GlypherSymbol(sq_cell, u'\u221A', area, ink=True, italic=False)
sqrt_sym.name = '\sqrt'; sq_cell.append(sqrt_sym)
self.sqrt = sq_cell
ex_cell = GlypherPhrase(self, align=('c','m'))
ex_cell.set_p('align_as_entity', True)
self.append(ex_cell)
expr = GlypherPhrase(self)
ex_cell.append(expr)
self.ex_cell = ex_cell
self.add_target(expr, 'expression')
line = GlypherHorizontalLine(None, length=10, thickness=0.05, thickness_too=True)
line.align=('c','m')
line.set_tied_to(expr)
## I think LaTeX just takes the max, but this looks quite nice
## until full compatibility is implemented
#lc = lambda : \
# 0.5*(nu_cell.width()+de_cell.width()) \
# if de_cell.width() < nu_cell.width() else \
# de_cell.width()
#vinculum.length_calc = lc
#vinculum.set_padding(1, 4)
#vinculum.set_padding(3, 4)
ex_cell.append(line, row=-1)
line.set_vertical_ignore(False)
line.set_horizontal_ignore(True)
line.set_always_recalc(True)
self.set_expr("expression")
if expression is not None :
expr.adopt(expression)
self.set_recommending(self["expression"])
def show_decoration(self) :
return True
g.phrasegroups['fraction'] = GlypherFraction
g.phrasegroups['square_root'] = GlypherSqrt
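# Registration sketch: entries in g.phrasegroups let the XML parser and the
# caret build these groups by name, e.g. via Parser.make_phrasegroup (as used
# in the phrasegroup module below; assumed call form):
#
#   frac = Parser.make_phrasegroup(parent_phrase, 'fraction')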

# ==== next source file (file path marker truncated in this extract) ====
import glypher as g
import exceptions
import copy
import draw
import gutils
from aobject.utils import debug_print
from Phrase import *
from Symbol import *
from Spacer import *
import Parser
from sympy.series import limits
from sympy.core.sympify import SympifyError
import Dynamic
from glypher import \
GLYPHER_PG_LEAD_ALL, \
GLYPHER_PG_LEAD_MID, \
GLYPHER_PG_LEAD_VERT, \
GLYPHER_PG_LEAD_HORI
ac = gutils.array_close
fc = gutils.float_close
class GlypherPhraseGroup (GlypherPhrase) :
phrases = None
phrase = None
lead_phrase = None
first_highlighted_pg_over_active = False
target_phrases = None
alts_phrases = None
get_sympy_code = None
to_string_code = None
to_latex_code = None
alternatives_cat = None
ignore_targets = None
#def to_string(self) :
# return '(' + self.mes[len(self.mes)-1] + '|' + '|'.join([t+'='+self.target_phrases[t].to_string() for t in self.target_phrases])
lhs_target = None
def set_lhs_target(self, lhs_target) :
"""Sets (or with None, unsets) the LHS target, for set_lhs."""
if lhs_target is None :
self.lhs_target = None
elif lhs_target in self.target_phrases :
self.lhs_target = lhs_target
else :
raise IndexError("lhs_target should be a target.")
def set_lhs(self, lhs) :
"""If lhs_target is set, puts lhs there, otherwise returns False."""
if self.lhs_target is not None :
self[self.lhs_target].adopt(lhs)
return True
return False
rhs_target = None
def set_rhs_target(self, rhs_target) :
"""Sets (or with None, unsets) the LHS target, for set_rhs."""
if rhs_target is None :
self.rhs_target = None
elif rhs_target in self.target_phrases :
self.rhs_target = rhs_target
else :
raise IndexError("rhs_target should be a target.")
def set_rhs(self, rhs) :
"""If rhs_target is set, puts rhs there, otherwise returns False."""
if self.rhs_target is not None :
self[self.rhs_target].adopt(rhs)
return True
return False
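# Sketch (names as used by GlypherFraction above): a subclass registers a
# target and nominates it as the LHS/RHS slot, after which set_lhs()/set_rhs()
# adopt operands into it.
#
#   self.add_target(nu_cell, 'numerator')
#   self.set_lhs_target('numerator')
#   self.set_lhs(some_entity)   # adopted into the 'numerator' target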
def __setitem__(self, key, value) :
"""Add a new TargetPhrase called key in value."""
self.add_target(value, key)
def __getitem__(self, key) :
"""Retrieve a Target (or child) by key."""
# If this is an int, then let Phrase find it, otherwise it should be a
# string (of some sort) and a Target of ours.
if isinstance(key, int) :
return GlypherPhrase.__getitem__(self, key)
elif not isinstance(key, basestring) :
raise(TypeError("For pg[target], target must be str not " + str(type(key))))
if key in self.target_phrases :
return self.get_target(key)
raise(IndexError("Target "+key+" not found in PhraseGroup"))
# Stop looking up tree to find edit mode
#def is_edit_mode(self) :
# return self.edit_mode
# Stop upward search for all binary expressions, except SpaceArray
stop_for_binary_expression_default = True
stop_for_binary_expression_exceptions = ('space_array', 'equality')
def to_latex(self) :
if not self.get_visible() : return ""
elif self.get_blank() : return " "
if self.to_latex_code :
return Dynamic.eval_for_sympy(self, self.to_latex_code)
return GlypherPhrase.to_latex(self)
def to_string(self, mode = "string") :
if not self.get_visible() : return unicode("")
elif self.get_blank() : return unicode(" ")
if self.to_string_code :
return Dynamic.eval_for_sympy(self, self.to_string_code)
return GlypherPhrase.to_string(self, mode=mode)
def get_sympy(self) :
if self.get_sympy_code :
return Dynamic.eval_for_sympy(self, self.get_sympy_code)
return GlypherPhrase.get_sympy(self)
def draw_alternatives(self, cr) :
pass
def next_alternative(self) :
self._alternative_in_dir(go_next=True)
def _alternative_in_dir(self, go_next = True) :
cat = self.alternatives_cat
if cat is not None :
alts = g.find_phrasegroup_alternatives(cat)
if self.mes[-1] in alts and len(alts) > 1 :
pos = alts.index(self.mes[-1])
pos = (len(alts) + pos + (1 if go_next else -1)) % len(alts)
new_name = alts[pos]
operands = []
targets = [self.lhs_target, self.rhs_target] + \
self.target_phrases.keys()
for tgt in targets :
if tgt is None :
continue
tgt = self.target_phrases[tgt]
if len(tgt) == 0 :
continue
op = tgt.get_entities()[0]
op.orphan()
operands.append(op)
self.parent.exchange(self, Parser.make_phrasegroup(self.parent,
new_name,
operands=operands))
def prev_alternative(self) :
self._alternative_in_dir(go_next=False)
def recalc_basebox(self) :
GlypherPhrase.recalc_basebox(self)
pbasebox = self.config[0].get_basebox()
if self.lead_phrase is None : return pbasebox
b = self.lead_phrase.config[0].get_basebox()
#if len(self.lead_phrase.get_entities()) > 0 :
# debug_print(self.lead_phrase.get_entities()[0].format_me())
# debug_print(self.lead_phrase.get_entities()[0].get_basebox())
# debug_print(self.lead_phrase.format_me())
# debug_print(b)
# debug_print('-'*30)
la = self.get_p('lead_application')
self.config[0].basebox = \
(b[0] if la[0] else pbasebox[0],\
b[1] if la[1] else pbasebox[1],\
b[2] if la[2] else pbasebox[2],\
b[3] if la[3] else pbasebox[3],\
b[4] if la[4] else pbasebox[4],\
b[5] if la[5] else pbasebox[5])
def set_lead(self, lead, application = (True,True,True,True)) :
self.lead_phrase = lead
self.set_p('lead_application', application)
def get_xml(self, name = None, top = True, targets = None, full = False) :
if targets is None :
targets = self.target_phrases
if full :
root = GlypherPhrase.get_xml(self, name, top, targets=targets,
full=False)
else :
root = ET.Element(self.get_name())
root.set('type', self.mes[-1])
tgts = ET.Element('targets')
for t in self.target_phrases :
if t in self.ignore_targets :
continue
r = self.target_phrases[t].get_xml(name='target', top=False,
full=False)
r.set('name', t)
tgts.append(r)
if len(tgts) > 0 :
root.append(tgts)
if self.lhs_target is not None :
root.set('lhs', self.lhs_target)
if self.rhs_target is not None :
root.set('rhs', self.rhs_target)
return root
def child_change(self) :
"""Called if a child changes in a non-geometric sense."""
GlypherPhrase.child_change(self)
if self.included() :
self.make_simplifications()
def make_simplifications(self) :
pass
def add_alts(self, phrase, name) :
ap = make_alts_phrase()
phrase.adopt(ap)
self.alts_phrases[name] = ap
return ap
def add_target(self, phrase, name, stay_enterable = False) :
"""Add a Target, that is, a TargetPhrase which looks a bit funny and can
be directly addressed from the PhraseGroup by a string. It sits Inside
the passed Phrase (i.e. TP=P.IN()=TP.IN()) and, by default, should be
indistinguishable to the end-user through the GUI."""
if not isinstance(name, basestring) :
raise TypeError("Target names in PGs should always be str/unicode")
if not isinstance(phrase, GlypherPhrase) :
raise TypeError("""
Only Phrases may be given to be turned into Targets
""")
# Generate a TargetPhrase
tp = make_target_phrase()
# Ensure it will delete its parent automatically
tp.set_deletable(2)
# Avoid users falling into the nesting gap
if not stay_enterable :
phrase.set_enterable(False)
# Put it in
phrase.adopt(tp)
# Add it to the dictionary
self.target_phrases[name] = tp
# Tell tp who the pg is
tp.pg = self
# Give it a name for ease of finding
tp.set_name(name)
def get_target(self, name) :
return self.target_phrases[name]
# Potentially confusing name similarity
def get_alts(self, name) :
return self.alts_phrases[name]
def get_alt(self, name) :
return self.alts_phrases[name].active_child
def inside_a(self, what, ignore=()) :
if self.am(what) : return self
if self.included() and len(set(self.mes) & set(ignore))>0 :
return self.get_parent().inside_a(what, ignore)
return None
def set_highlight_group(self, highlight_group) : self.set_p('highlight_group', highlight_group)
def get_highlight_group(self) : return self.get_p('highlight_group')
def __init__(self, parent, phrase_defs = [], area = (0,0,0,0), lead_phrase = None, phraser=None, highlight_group = True) :
self.phrases = {}
self.target_phrases = {}
self.alts_phrases = {}
self.ignore_targets = []
GlypherPhrase.__init__(self, parent, area)
self.add_properties({'lead_application' :(True,True,True,True,True,True),
})
#self.add_properties({'local_space' : True})
#debug_print((self,self.mes))
self.set_highlight_group(highlight_group)
self.mes.append('phrasegroup')
self.set_enterable(False)
if phraser == None : phraser = GlypherExpression if g.math_mode else GlypherPhrase
self.phraser = phraser
test = phraser(parent)
self.set_p('phraser', test.mes[len(test.mes)-1] if phraser else None)
del test
if isinstance(phrase_defs, dict) :
for name in phrase_defs : self.append_phrase_to_group(name, phrase_defs[name])
else :
for ind, pd in enumerate(phrase_defs) :
glyph = pd['o']
pd['x'] = ind
self.phrases[pd['n']] = [glyph,pd]
# Make sure that appending doesn't bring the left edge forward
#old_left = self.old_config.bbox[0]
#adj = self.get_adj(ind)
#if pd['n'] == 'col1' : adj = 10
#glyph.translate(adj, 0)
self.append(glyph, row=pd['r'] if 'r' in pd else 0, override_in=True, move=(True,True), align=pd['a'] if 'a' in pd else ('l','m'))
#if self.config[0].bbox[0] > old_left : self.config[0].bbox[0] = old_left
#self.feed_up()
self.characteristics.append('_bodmasable')
#def elevate_entities(self, new_parent, adopt = False, to_front = False) :
# #debug_print(self.lead_phrase)
# if self.lead_phrase is not None :
# return self.get_phrase(self.lead_phrase).elevate_entities(new_parent, adopt, to_front)
def get_phrase(self, phrase) :
return self.phrases[phrase][0]
def add_phrase(self, phr, name) :
self.phrases[name] = [phr, {'x':0}]
phr.set_name(name)
def set_child_active(self, active, desc) :
GlypherPhrase.set_child_active(self, active, desc)
ancs = desc.get_ancestors()
if not active : self.first_highlighted_pg_over_active = False
else :
for anc in ancs :
if anc == self and anc.get_highlight_group() : self.first_highlighted_pg_over_active = True; break
if anc.am('phrasegroup') and anc.get_highlight_group() :
self.first_highlighted_pg_over_active = False; break
def get_adj(self, loc) :
adj = loc
for phr in self.phrases :
p = self.phrases[phr]
#debug_print(p[0].to_string() + ' ' +str(p[0].config[0].bbox))
if loc > p[1]['x'] : adj += p[0].config[0].bbox[2]-p[0].config[0].bbox[0]
return adj
def append_phrase_to_group(self, name, pd) :
phraser = self.phraser
adj = self.get_adj(pd['x'])
if 'g' in pd : phraser = pd['g']
pos = (self.config[0].bbox[0], self.config[0].bbox[3])
glyph = phraser(self, (pos[0]+adj,self.get_topline()+pd['y'],pos[0]+adj,self.get_baseline()+pd['y']),\
pd['ls'] if 'ls' in pd else 1.0,\
pd['fs'] if 'fs' in pd else 1.0,\
pd['a'] if 'a' in pd else ('l','b'))
glyph.x_offset = adj
glyph.y_offset = pd['y']
self.phrases[name] = [glyph,pd]
# Make sure that appending doesn't bring the left edge forward
old_left = self.old_config.bbox[0]
self.append(glyph, row=pd['r'] if 'r' in pd else 0, override_in=True, move=(True,True), align=pd['a'] if 'a' in pd else ('l','m'))
if self.config[0].bbox[0] > old_left : self.config[0].bbox[0] = old_left
self.feed_up()
def decorate(self, cr) :
self.draw_topbaseline(cr)
if not self.get_visible() : return
if g.additional_highlighting and self.get_attached() :
cr.save()
cr.move_to(self.config[0].bbox[0]-2, self.config[0].bbox[3]+2)
draw.draw_full_blush(cr, self.config[0].bbox[2]-self.config[0].bbox[0]+4, self.config[0].bbox[3]-self.config[0].bbox[1]+4, (0.8,0.95,0.95))
cr.set_source_rgba(0.6, 0.9, 0.9, 1.0)
area=(self.config[0].bbox[0]-2, self.config[0].bbox[2]+2, self.config[0].bbox[1]-2, self.config[0].bbox[3]+2)
draw.trace_rounded(cr, area, 5)
cr.stroke()
cr.restore()
elif self.get_highlight_group() and\
self.first_highlighted_pg_over_active and self.show_decoration() :
cr.save()
#cr.set_line_width(2.0)
#cr.rectangle(self.config[0].bbox[0]-2, self.config[0].bbox[1]-2, self.config[0].bbox[2]-self.config[0].bbox[0]+4, self.config[0].bbox[3]-self.config[0].bbox[1]+4)
#cr.set_source_rgba(0.9, 0.8, 0.6, 0.8)
cr.move_to(self.config[0].bbox[0]-2, self.config[0].bbox[1]-8)
draw.draw_inverse_blush(cr, self.config[0].bbox[2]-self.config[0].bbox[0]+4, self.config[0].bbox[3]-self.config[0].bbox[1]-2, (0.9,0.8,0.6))
if g.stroke_mode :
cr.fill_preserve()
cr.set_source_rgba(0.5, 0.5, 0.4, 0.6)
cr.stroke()
else : cr.fill()
cr.restore()
#if you want to run any phrase functions, you should always run through the expr() fn,
#and below is why.
class GlypherCompoundPhrase(GlypherPhraseGroup) :
phrase_name = ''
in_ready = False
def __init__(self, parent, phrase_defs, area = (0,0,0,0), phrase = None, phraser = GlypherPhrase) :
self.phrase_name = phrase
GlypherPhraseGroup.__init__(self, parent, phrase_defs, area, phrase, phraser, highlight_group=False)
if phrase is not None : self.set_expr(phrase)
#self.in_ready = True
#self.IN()._out = self
#self.phrases[phrase][0].set_deletable(2) # Send delete requests for rhs to me
#self.set_recommending(self.IN())
#get_caret().enter_phrase(self.expr())
#def IN(self) : return self.phrases[self.phrase_name][0].IN() if self.in_ready else self
def set_expr(self, phrase) :
self.phrase_name = phrase
#debug_print(self.phrases)
self.in_ready = True
self.set_in(self.get_target(self.phrase_name))
self.set_lead(self.get_target(phrase).IN(), GLYPHER_PG_LEAD_ALL)
self.recalc_bbox()
class GlypherBracketedPhrase(GlypherCompoundPhrase) :
left_bracket = None
right_bracket = None
is_suspend_collapse_checks = False
collapse_condition = None
stop_for_binary_expression_default = False
stop_for_binary_expression_exceptions = ()
def set_bracket_shapes(self, bracket_shapes) :
self.suspend_recommending()
brkt_shape = bracket_shapes[0]
phrase = self.left_bracket
for i in (0,1) :
symbol = GlypherSymbol(self, brkt_shape, ink=True)
symbol.set_attachable(False)
phrase.IN().adopt(symbol)
brkt_shape = bracket_shapes[1]; phrase = self.right_bracket
self.set_p('bracket_shapes', bracket_shapes)
self.resume_recommending()
def get_bracket_shapes(self) :
return self.get_p('bracket_shapes')
def __init__(self, parent, area = (0,0,0,0), line_size_coeff = 1.0, font_size_coeff = 1.0, align = ('l','m'), no_fices = False,\
auto = True, keep_space = False, hidden_spacing = (0,0), expr = None, bracket_shapes = ('(',')') ) :
#pds = {}
# pass no_fices
#pds['left_bracket'] = { 'x' : 0 , 'y' : 0, 'a' : ('l','m') }
#pds['expression'] = { 'x' : 1 , 'y' : 0, 'a' : align }
#pds['right_bracket'] = { 'x' : 2 , 'y' : 0, 'a' : ('l','m') }
self.suspend_collapse_checks()
GlypherCompoundPhrase.__init__(self, parent, [], area)
self.no_bracket = set()
self.set_p('no_bracket', self.no_bracket)
self.mes.append('bracketed_phrase')
self.no_bracket.add('fraction')
self.no_bracket.add('symbol')
self.no_bracket.add('square_root')
self.no_bracket.add('matrix')
self.no_bracket.add('reference')
self.no_bracket.add('constant')
self.left_bracket = GlypherPhrase(self); self.add_phrase(self.left_bracket, 'left_bracket')
self.right_bracket = GlypherPhrase(self); self.add_phrase(self.right_bracket, 'right_bracket')
#self.left_space = GlypherSpace(self, (hidden_spacing[0],1))
#self.right_space = GlypherSpace(self, (hidden_spacing[1],1))
#self.left_space = GlypherSymbol(None, '-')
#self.right_space = GlypherSymbol(None, '-')
self.expression = GlypherPhrase(self)
self.expression.set_p('align_as_entity', True)
#self.expression_out = GlypherPhrase(self, align_as_entity=True)
self.append(self.left_bracket, override_in=True, move=(True,True), align=('l','m'))
#self.append(self.left_space, override_in=True, move=(True,True), align=('l','m'))
self.append(self.expression)
#self.append(self.expression_out, override_in=True, move=(True,True), align=align)
#self.expression_out.adopt(self.expression)
#self.append(self.right_space, override_in=True, move=(True,True), align=('l','m'))
self.append(self.right_bracket, override_in=True, move=(True,True), align=('l','m'))
#self.target_phrases['expression'] = self.expression
self.add_target(self.expression, 'expression')
self.set_enterable(False)
self.set_expr('expression')
self.set_lead(self.expression.IN(), GLYPHER_PG_LEAD_MID)
self.set_p('keep_space', keep_space)
#self.left_space.hide()
#self.right_space.hide()
brkt_shape = bracket_shapes[0]
phrase = self.left_bracket
for i in (0,1) :
phrase.set_enterable(False)
phrase.set_attachable(False)
phrase = self.right_bracket
self.set_bracket_shapes(bracket_shapes)
#if expr is not None :
# self.phrases['expression'][0].append(expr)
if auto : self.set_auto_bracket(True)
#debug_print(self.left_bracket.format_loc())
#self.set_auto_bracket(False)
#debug_print(self.right_bracket.format_loc())
#debug_print(self.expression.format_loc())
self.resume_collapse_checks()
if expr is not None :
self.expression.append(expr)
self.check_collapse()
self.set_recommending(self.get_target('expression'))
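    # Illustrative sketch (hypothetical usage, not taken from elsewhere in the
    # code): a square-bracketed sub-expression could be built roughly like
    #
    #     sq = GlypherBracketedPhrase(parent, expr=some_entity,
    #                                 bracket_shapes=(u'[', u']'))
    #
    # With auto=True (the default) check_collapse() hides the brackets again
    # whenever should_collapse() decides the contents do not need them.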
def set_auto_bracket(self, auto_bracket) : self.set_p('auto_bracket', auto_bracket)
def get_auto_bracket(self) : return self.get_p('auto_bracket')
def set_no_bracket(self, no_bracket) : self.set_p('no_bracket', no_bracket)
def get_no_bracket(self) : return self.get_p('no_bracket')
def set_collapse_condition(self, collapse_condition) : self.set_p('collapse_condition', collapse_condition)
def get_collapse_condition(self) : return self.get_p('collapse_condition')
def set_collapsed(self, collapsed) : self.set_p('collapsed', collapsed)
def get_collapsed(self) : return self.get_p('collapsed')
# This ents0 arg allows us to decide should_collapse based on a different
# entity
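    # Brackets are dropped when the expression is empty, or when its single
    # (possibly nested) content is word-like, carries force_no_bracket, or is
    # one of the no_bracket types registered above (fraction, symbol,
    # square_root, matrix, reference, constant).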
def should_collapse(self, ents0 = None) :
ents = self.get_target('expression').get_entities()
if ents0 is None and len(ents) == 1 :
ents0 = ents[0]
if ents0 is not None :
while ents0.OUT().mes[-1] in ('phrase', 'target_phrase') and len(ents0) == 1 :
ents0 = ents0[0]
#debug_print(ents)
# ents0.is_wordlike() or
return len(ents) == 0 or (ents0 and \
(len(set(ents0.mes) & self.get_no_bracket())>0 or \
ents0.is_wordlike() or ents0.get_p('force_no_bracket')) \
)
def suspend_collapse_checks(self) :
self.is_suspend_collapse_checks = True
def resume_collapse_checks(self) :
self.is_suspend_collapse_checks = False
self.check_collapse()
def check_collapse(self) :
cc = self.get_collapse_condition()
if self.get_auto_bracket() and not self.is_suspend_collapse_checks :
if self.should_collapse() \
or (cc and cc()) :
self.brackets_collapse()
else :
self.brackets_restore()
def brackets_collapse(self) :
ks = self.get_p('keep_space')
if isinstance(ks, tuple) :
ksl = ks[0]; ksr = ks[1]
else :
ksl = ks; ksr = ks
if self.left_bracket.get_visible() and not ksl : self.left_bracket.hide()#; self.left_space.show()
if not self.left_bracket.get_blank() : self.left_bracket.blank()
if self.right_bracket.get_visible() and not ksr : self.right_bracket.hide()#; self.right_space.show()
if not self.right_bracket.get_blank() : self.right_bracket.blank()
self.set_collapsed(True)
def brackets_restore(self) :
ks = self.get_p('keep_space')
if isinstance(ks, tuple) :
ksl = ks[0]; ksr = ks[1]
else :
ksl = ks; ksr = ks
if not self.left_bracket.get_visible() and not ksl : self.left_bracket.show()#; self.left_space.show()
if self.left_bracket.get_blank() : self.left_bracket.unblank()
if not self.right_bracket.get_visible() and not ksr : self.right_bracket.show()#; self.right_space.show()
if self.right_bracket.get_blank() : self.right_bracket.unblank()
self.set_collapsed(False)
def child_change(self) :
self.check_collapse()
GlypherCompoundPhrase.child_change(self)
_altering = False
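    # child_altered below rescales the bracket glyphs so that their basebox
    # height tracks roughly 80% of the content's basebox height (the .8*sc
    # factor); the _altering flag guards against re-entry, since the rescale
    # itself triggers further child_altered notifications.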
def child_altered(self, child = None) :
GlypherCompoundPhrase.child_altered(self, child)
if self.in_ready and not self._altering and not self.is_suspend_collapse_checks : #and False :#RMV
self._altering = True
for b in (self.left_bracket, self.right_bracket) :
#break
#if not b or not b.visible : break
if not b : break
sc = (self.IN().config[0].basebox[5]-self.IN().config[0].basebox[3])
#bc = b.get_scaled_font_size()
bc = (b.config[0].basebox[5]-b.config[0].basebox[3])
if not fc(.8*sc, bc) :
if b.config[0].get_changes() != "" :
raise(RuntimeError('Rescaling parentheses for an un-reset bracket bounding box'))
b.set_font_size_scaling((.8*sc/bc)*b.get_size_scaling())
bc = (b.config[0].basebox[5]-b.config[0].basebox[3])
self._altering = False
class GlypherBODMASBracketedPhrase(GlypherBracketedPhrase) :
def set_bodmas_sensitivity(self, bodmas_sensitivity) : self.set_p('bodmas_sensitivity', bodmas_sensitivity)
def get_bodmas_sensitivity(self) : return self.get_p('bodmas_sensitivity')
def __init__(self, parent, area = (0,0,0,0), line_size_coeff = 1.0, font_size_coeff = 1.0, align = ('l','m'), no_fices = False) :
GlypherBracketedPhrase.__init__(self, parent, area, line_size_coeff, font_size_coeff, align, no_fices)
def should_collapse(self, ents0 = None) :
# TODO: move 'expr' to 'inside'
ents = self.IN().get_entities()
if ents0 is None and len(ents) == 1 :
ents0 = ents[0]
return GlypherBracketedPhrase.should_collapse(self, ents0=ents0) or \
(ents0 and ents0.am_c('_bodmasable') and ents0.get_bodmas_level() < self.get_bodmas_sensitivity())
def child_change(self) :
GlypherBracketedPhrase.child_change(self)
self.check_collapse()
#debug_print(self.entities[0].get_bodmas_level())
class GlypherTargetPhraseError(RuntimeError) :
tp = None
def __init__(self, tp, err = None) :
self.tp = tp
tp.set_error_note(err)
RuntimeError.__init__(self, err)
class GlypherTargetPhrase(GlypherPhrase) :
pg = None
hl_anc = False
error = False
def __init__(self, parent, area = (0,0,0,0), line_size_coeff = 1.0, font_size_coeff = 1.0, align = ('l','m'), auto_fices = False) :
GlypherPhrase.__init__(self, parent, area, line_size_coeff, font_size_coeff, align, auto_fices)
self.mes.append('target_phrase')
self.characteristics.append('_bodmasable')
self.characteristics.append('_in_phrase')
self.add_properties({'blank_ratio' : 0.15, 'attachable' : True,
'local_space' : True})
def get_phrasegroup(self) :
return self.pg
def get_bodmas_level(self) :
ents = self.get_entities()
#debug_print(self.entities)
if (len(ents) == 1 and ents[0].am_c('_bodmasable')) :
return ents[0].get_bodmas_level()
else : return 100
def decorate(self, cr) :
if g.show_rectangles and self.show_decoration() :
cr.save()
cr.set_line_width(2.0)
cr.set_source_rgba(0.5, 0.5, 0.8, 0.4)
cr.rectangle(self.config[0].bbox[0]-2, self.config[0].bbox[1]-2, self.config[0].bbox[2]-self.config[0].bbox[0]+4, self.config[0].bbox[3]-self.config[0].bbox[1]+4)
cr.stroke()
cr.restore()
cr.set_source_rgba(0.5, 0.5, 0.8, 1.0)
cr.move_to(self.config[0].bbox[0]-4, self.config[0].basebox[4])
cr.line_to(self.config[0].bbox[2]+4, self.config[0].basebox[4])
cr.stroke()
if not self.is_enterable() : return
hl_anc = None
# If this is in an unhighlighted highlight group, don't show it, otherwise if the first highlighted group is
# above it, show it
for anc in self.get_ancestors() :
if anc.am('phrasegroup') :
if anc.first_highlighted_pg_over_active : hl_anc = anc; break
#else : hl_anc = None; break
elif anc.get_highlight_group() : hl_anc = None; break
self.hl_anc = hl_anc
if not hl_anc and not self.error : return
cr.save()
red = 1.0 if self.error else 0.4
cr.set_source_rgba(red, 0.4, 0.2, 0.1 if g.show_rectangles else 0.2)
area=(self.config[0].bbox[0]-2, self.config[0].bbox[2]+2, self.config[0].bbox[1]-2, self.config[0].bbox[3]+2)
draw.trace_rounded(cr, area, 5)
if len(self.get_entities()) == 0 :
cr.fill_preserve()
cr.set_source_rgba(red, 0.4, 0.2, 0.2 if g.show_rectangles else 0.4)
cr.stroke()
cr.restore()
def get_sympy(self) :
#if len(self.IN().entities) == 0 :
# raise GlypherTargetPhraseError(self, "Please enter something!")
try :
sy = GlypherPhrase.get_sympy(self)
self.set_error_note()
return sy
except GlypherTargetPhraseError :
self.set_error_note()
raise
except exceptions.RuntimeError as e:
self.set_error_note("Problem with sympy parsing : " +str(e))
raise GlypherTargetPhraseError(self, str(e))
except SympifyError as e:
self.set_error_note("Sympy complained : " +str(e))
raise GlypherTargetPhraseError(self, str(e))
except :
self.set_error_note("Problem with sympy parsing : " +str(sys.exc_info()[1]))
raise GlypherTargetPhraseError(self, str(sys.exc_info()[1]))
ref_target_phrase = None
def make_target_phrase () :
global ref_target_phrase
if ref_target_phrase is None :
ref_target_phrase = GlypherTargetPhrase(None)
return copy.deepcopy(ref_target_phrase)
ref_bracketed_phrase = None
def make_bracketed_phrase () :
global ref_bracketed_phrase
if ref_bracketed_phrase is None :
ref_bracketed_phrase = GlypherBracketedPhrase(None)
return copy.deepcopy(ref_bracketed_phrase)
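# make_target_phrase and make_bracketed_phrase keep a single reference
# instance and hand out deep copies of it, presumably because deepcopying a
# prototype is cheaper than re-running the full constructors each time.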
g.phrasegroups['phrasegroup'] = GlypherPhraseGroup
g.phrasegroups['bracketed_phrase'] = GlypherBracketedPhrase
g.phrasegroups['target_phrase'] = GlypherTargetPhrase

# === end of aesthete/glypher/PhraseGroup.py ===
__all__ = ['Glypher', 'GlyphEntry', 'GlyphMaker']
import os
import shutil
import lxml.etree as ET
from xml.parsers.expat import ExpatError
from aobject.paths import *
from aobject.utils import debug_print
import glypher as g
from Parser import add_phrasegroup_tree, add_formula_tree, load_shortcuts
from Function import function_init
from Word import word_init, make_word
import Widget
g.import_interpretations()
g.import_combinations()
g.import_specials()
formula_files = map(lambda e : get_share_location()+'formulae/'+e,
os.listdir(get_share_location()+'formulae'))
formula_files += map(lambda e : get_user_location()+'glypher/formulae/'+e,
os.listdir(get_user_location()+'glypher/formulae'))
for u in formula_files :
try :
tree = ET.parse(u)
name = tree.getroot().get('name').lower()
add_formula_tree(name, tree)
except (ExpatError, ET.XMLSyntaxError) as e :
debug_print("WARNING : Could not parse formula '%s', continuing without: %s" % (u, e))
file_sets = (map(lambda e : get_share_location()+'defs/'+e,
os.listdir(get_share_location()+'defs')),
map(lambda e : get_user_location()+'glypher/phrasegroups/'+e,
os.listdir(get_user_location()+'glypher/phrasegroups')))
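# The built-in definitions under the share 'defs' directory are parsed first
# with user=False; anything found in the user's glypher/phrasegroups directory
# is then parsed with user=True so it is flagged as user-supplied.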
user = False
for file_set in file_sets :
for u in file_set :
try :
tree = ET.parse(u)
name = tree.getroot().tag.lower()
add_phrasegroup_tree(name, tree, user=user, latex=tree.getroot().get('latex_name'))
except (ExpatError, ET.XMLSyntaxError) as e :
debug_print("WARNING : Could not parse PG '%s', continuing without: %s" % (u, e))
user = True
word_init()
function_init()
shortcut_default_file = get_share_location()+'shortcuts.default.xml'
load_shortcuts(shortcut_default_file)
shortcut_file = get_user_location()+'glypher/shortcuts.xml'
if not os.path.exists(shortcut_file) :
shutil.copyfile(shortcut_default_file,
shortcut_file)
load_shortcuts(shortcut_file)

# === end of aesthete/glypher/__init__.py ===
import glypher as g
import Dynamic
import sympy
import re
import draw
from Phrase import *
from PhraseGroup import *
from Symbol import *
from sympy import physics, sympify
from sympy.core.symbol import Symbol as sySymbol
from sympy.core.basic import Basic
from sympy.core.numbers import *
from sympy.core.sets import *
import Parser
import Decoration
class GlypherWord (GlypherPhrase) :
title = "Word"
info_text = '''
A basic conceptual unit: a combination of unicode characters (GlypherSymbols)
that together represent a single sympy Symbol.
'''
defined_function = False
defined_symbol = False
let_function = False
let_matrix = False
wildcard = False
def is_function(self) :
return self.defined_function or self.let_function
def set_auto_italicize(self, auto_italicize) : self.set_p('auto_italicize', auto_italicize)
def get_auto_italicize(self) : return self.get_p('auto_italicize')
def __init__(self, parent, area = (0,0,0,0)) :
GlypherPhrase.__init__(self, parent, area)
self.mes.append('word')
self.add_properties({'auto_italicize' : True, 'wordlike' : True})
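    # split() divides the word at a horizontal offset measured from the word's
    # own left edge and returns (word1, word2), using None for an empty side.
    # For example (hypothetical), splitting 'abc' at an offset just past the
    # first glyph yields words 'a' and 'bc'.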
def split(self, position) :
area = (self.config[0].bbox[0], self.get_topline(), self.config[0].bbox[2], self.get_baseline())
wanc = area[0]
word1 = GlypherWord(self.get_up(), area)
word2 = GlypherWord(self.get_up(), area)
ents = self.get_entities()
bboxes = [ent.config[0].bbox[0] for ent in ents]
i = 0
for ent in ents :
ent.orphan()
if bboxes[i]-wanc < position : word1.append(ent, quiet = True)
else : word2.append(ent, quiet = True)
i += 1
word1.recalc_bbox()
word2.recalc_bbox()
if len(word1.get_entities()) == 0 : return (None,word2)
elif len(word2.get_entities()) == 0 : return (word1,None)
return (word1, word2)
    # We assume that a word should only be treated as leading with a number if
    # the whole word is numeric
def is_leading_with_num(self) : return self.is_num()
def is_num(self) : return all_numbers.match(self.to_string())
def is_roman(self) : return all_roman.match(self.to_string())
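    # For example (assuming g.is_floating_point_num_regex matches ordinary
    # decimal literals): '3.14' is numeric, 'sin' is roman, and 'x2' is
    # neither numeric nor (with the anchored pattern) roman.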
def to_latex(self) :
if not self.get_visible() : return ""
elif self.get_blank() : return " "
me_str = self.to_string()
if me_str in g.interpretations :
me_str = g.interpretations[me_str]["latex"]
return str(me_str)
#def _get_symbol_string(self, sub = True) :
# if self.is_num() :
# return None
# return GlypherPhrase._get_symbol_string(sub=sub)
def get_sympy(self, sub = True, ignore_func = False) :
if self.let_function and not ignore_func and \
self.get_sympy(ignore_func=True) in g.let_functions :
name = self.get_sympy(ignore_func=True)
return g.let_functions[name]
elif self.defined_function and not ignore_func and \
self.get_sympy(ignore_func=True) in g.define_functions :
name = self.get_sympy(ignore_func=True)
args = g.define_functions[name]
f = Dynamic.Function(str(self.to_string()))
return f(*args)
elif self.let_matrix and not ignore_func and \
self.get_sympy(ignore_func=True) in g.let_matrices :
name = self.get_sympy(ignore_func=True)
return g.let_matrices[name]
me_str = self._get_symbol_string(sub)
if self.is_num() :
sym = sympify(self.to_string())
else :
sym = sySymbol(str(me_str))
if sub and sym in g.var_table :
sym = g.var_table[sym]
if sym in g.define_symbols and not ignore_func :
sym = g.define_symbols[sym]
return sym
#return sySymbol(self.to_string("sympy"))
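    # Summary of get_sympy above: Let-functions, Define-functions and
    # Let-matrices registered in glypher take priority over the plain symbol
    # reading; numeric strings are sympified to numbers; otherwise a sympy
    # Symbol is built, with var_table substitution applied when sub=True.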
def get_symbol_extents(self) :
if len(self.get_entities()) == 0 :
return (self.config[0].bbox[0], self.config[0].bbox[2])
extents = [self.get_entities()[0].config[0].bbox[0],
self.get_entities()[0].config[0].bbox[2]]
for sym in self.get_entities() :
ie = sym.config[0].bbox
if ie[0] < extents[0] :
extents[0] = ie[0]
if ie[2] > extents[1] :
extents[1] = ie[2]
return extents
def draw(self, cr) :
try :
if self.get_visible() and not self.get_blank() and \
self.get_sympy(sub=False, ignore_func=True) in g.var_table :
cr.save()
cr.set_source_rgba(1.0,0.8,0,0.5)
#cr.rectangle(self.config[0].bbox[0], self.config[0].bbox[1],
# self.get_width(), self.get_height())
area = (self.config[0].bbox[0],
self.config[0].bbox[2],
self.config[0].bbox[1],
self.config[0].bbox[3])
draw.trace_rounded(cr, area, 7)
cr.fill()
cr.restore()
except :
pass
GlypherPhrase.draw(self, cr)
def process_key(self, name, event, caret) :
mask = event.state
if name == 'Return' and \
self.included() and \
self.get_sympy(sub=False, ignore_func=True) in g.var_table :
sy = self.get_sympy(sub=False, ignore_func=True)
new_pg = interpret_sympy(self.get_parent(), g.var_table[sy])
self.get_parent().exchange(self, new_pg)
return True
return GlypherPhrase.process_key(self, name, event, caret)
def _adjust_bbox(self, bbox) :
'''
Override this to expand (or contract) bbox after it has been set by
contained elements.
'''
for sym in self.get_entities() :
ie = sym.get_ink_extent()
if ie is None :
continue
if ie[0] < bbox[0] :
bbox[0] = ie[0]
if ie[1] > bbox[2] :
bbox[2] = ie[1]
def child_change(self) :
'''Runs a few checks to see what kind of Word we now have.'''
GlypherPhrase.child_change(self)
# Check whether we have a collapsible combination
if self.to_string() in g.combinations :
new_sym = GlypherSymbol(self, g.combinations[self.to_string()])
self.empty()
self.adopt(new_sym)
# Check whether we have a special combination
if self.to_string() in g.specials and self.included() :
pg = Parser.make_phrasegroup(self.get_parent(),
g.specials[self.to_string()])
self.get_parent().exchange(self, pg)
self.set_recommending(pg)
return
try :
symp = self.get_sympy(ignore_func=True)
except :
symp = None
# If we have a Let function, show it up
if symp and isinstance(symp, Basic) and \
symp in g.let_functions :
if not self.let_function :
self.let_function = True
self.set_bold(True)
self.set_rgb_colour((0.5, 0.2, 0.5))
# If we have a Define function, show it up
elif symp and isinstance(symp, Basic) and \
symp in g.define_functions :
if not self.defined_function :
self.defined_function = True
self.set_bold(True)
self.set_rgb_colour((0.2, 0.4, 0.4))
# If we have a Let matrix, show it up
elif symp and isinstance(symp, Basic) and \
symp in g.let_matrices :
if not self.let_matrix :
self.let_matrix = True
self.set_bold(True)
elif symp and isinstance(symp, Basic) and \
symp in g.define_symbols and \
isinstance(g.define_symbols[symp], Dynamic.Wild) :
if not self.wildcard :
self.wildcard = True
self.set_rgb_colour((0.8, 0.6, 0.0))
elif symp and isinstance(symp, Basic) and \
symp in g.define_symbols :
if not self.defined_symbol :
self.defined_symbol = True
self.set_rgb_colour((0.4, 0.4, 0.4))
# Otherwise, cancel those settings
elif self.defined_function or self.let_function or self.wildcard or \
self.defined_symbol :
self.defined_function = False
self.let_function = False
self.set_bold(False)
self.set_rgb_colour(None)
if self.get_auto_italicize() :
if len(self.entities) > 1 and self.is_roman() :
for e in self.entities :
if e.get_italic() : e.set_italic(False)
else :
for e in self.entities :
if not e.get_italic() and all_roman.match(e.to_string()) :
e.set_italic(True)
def set_italic(self, italic) :
for e in self.entities :
if e.get_italic() is not italic :
e.set_italic(italic)
def decorate(self, cr) :
#self.draw_topbaseline(cr)
if not self.get_visible() : return
#if self.active and self.shows_active :
if g.additional_highlighting and self.get_attached() and self.get_shows_active() :
cr.save()
#cr.set_line_width(4.0)
#cr.rectangle(self.bbox[0]-2, self.bbox[1]-2, self.bbox[2]-self.bbox[0]+4, self.bbox[3]-self.bbox[1]+4)
cr.move_to(self.config[0].bbox[0]-2, self.config[0].bbox[3]+2)
draw.draw_blush( cr, self.config[0].bbox[2]-self.config[0].bbox[0]+4, (0.5,0.5,0.5), 8)
#cr.line_to(self.config[0].bbox[2]+2, self.config[0].bbox[3]+2)
#cr.set_source_rgba(0.5, 0.5, 0.5, 0.6)
#cr.stroke()
cr.restore()
elif self.get_attached() or (len(self.get_entities()) > 1 and not self.is_num()) :
cr.save()
#cr.set_line_width(4.0)
cr.move_to(self.config[0].bbox[0]+2, self.config[0].bbox[3]-2)
#draw_blush( cr, self.config[0].bbox[2]-self.config[0].bbox[0]+4, (0.7,0.9,0.7), 2)
cr.line_to(self.config[0].bbox[2]-2, self.config[0].bbox[3]-2)
col = (0.7, 0.7, 0.9, 1.0) if self.get_attached() else (0.7, 0.9, 0.7, 1.0)
cr.set_source_rgba(*col)
cr.stroke()
cr.restore()
alternatives = {
1 : ('planck', 'hbar'),
2 : ('second', 'millisecond', 'microsecond', 'nanosecond', 'picosecond'),
3 : ('gee','gram', 'kilogram', 'microgram', 'milligram'),
4 : ('liter', 'milliliter', 'centiliter', 'decaliter'),
5 : ('meter', 'kilometer', 'u0', 'mole', 'micron', 'millimeter', 'centimeter'),
6 : ('ampere',),
7 : ('kelvin',),
8 : ('speed_of_light', 'candela'),
9 : ('hertz',),
10: ('newton',),
11: ('exp1', 'e0'),
12: ('G',),
}
alternatives_keys = {
'h' : 1, 's' : 2, 'g' : 3, 'l' : 4, 'm' : 5,
'A' : 6, 'K' : 7, 'C' : 8, 'H' : 9, 'N' : 10,
'e' : 11,'G' : 12,
}
alternatives_current_defaults = {}
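# The alternatives machinery: 'alternatives' maps a group index to a tuple of
# related constant/unit names, 'alternatives_keys' maps a trigger character
# (presumably a keyboard shortcut in the editor) to that group, and
# 'alternatives_current_defaults' remembers which member of each group is the
# current default; change_alternative() below advances it modulo the group.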
class GlypherConstant(GlypherPhraseGroup) :
get_sympy_code = None
value = None
toolbox = None
@classmethod
def parse_element(cls, parent, root, names, targets, operands, recommending, lead,
add_entities, am = None, top = True, args=None) :
ent = cls(parent, GlypherSymbol(parent, root.find('symbol').text))
return ent
def get_sympy(self) :
if self.get_sympy_code is not None :
return Dynamic.eval_for_sympy(self, self.get_sympy_code)
return self.value
def __init__(self, parent, entity) :
GlypherPhraseGroup.__init__(self, parent, [], area=[0,0,0,0])
self.set_rgb_colour((0.5, 0.5, 0.8))
self.mes.append('constant')
self.add_properties({'enterable' : False,
'have_alternatives' : False})
self.suspend_recommending()
self.append(entity)
#self.set_default_entity_xml()
constants = {}
class GlypherConstant_(GlypherConstant) :
value = None
symbol = None
me = None
constant_have_alternatives = False
parse_element = None
def __init__(self, parent) :
GlypherPhraseGroup.__init__(self, parent)
self.set_rgb_colour((0.5, 0.5, 0.8))
self.mes.append('constant')
if self.me is not None :
self.mes.append(self.me)
self.add_properties({'enterable' : False,
'have_alternatives' :
self.constant_have_alternatives})
self.suspend_recommending()
if self.symbol is not None :
self.append(GlypherEntity.xml_copy(parent, self.symbol))
self.set_default_entity_xml()
def change_alternative(self, dir = 1) :
ret = GlypherPhrase.change_alternative(self, dir=dir)
if ret:
for ind in alternatives :
alts = alternatives[ind]
if self.mes[-1] in alts :
i = alternatives_current_defaults[ind]
alternatives_current_defaults[ind] = (i+dir) % len(alts)
break
return ret
@classmethod
def new_from_symbol(cls, name, symbol, value, italicize=False,
cat="Constants", alternatives_cat=None) :
new_sym = GlypherSymbol(None, symbol, italic=italicize)
return cls.new_from_entity(name, new_sym, value, cat=cat,
alternatives_cat=alternatives_cat)
@classmethod
def new_from_symbol_sub(cls, name, symbol, sub,
value, italicize=False, it_sub=None, cat="Constants",
alternatives_cat=None) :
if it_sub is None :
it_sub = italicize
new_sym = GlypherSymbol(None, symbol, italic=italicize)
new_sub = GlypherSymbol(None, sub, italic=it_sub)
new_dec = Decoration.GlypherScript.subscript(None, area=(0,0,0,0),
expression=new_sym, subscript=new_sub)
return cls.new_from_entity(name, new_dec, value, cat=cat,
alternatives_cat=alternatives_cat)
@classmethod
def new_from_entity(cls, name, symbol, value, cat="Constants",
alternatives_cat=None) :
if cat is not None :
toolbox = {'symbol' : symbol.to_string(),
'category' : cat,
'priority' : None,
'shortcut' : None }
else :
toolbox = None
new_dict = {'me':name, 'value':value, 'symbol':symbol,
'toolbox':toolbox,'altname':name,'alternatives_cat':alternatives_cat}
new_sym = type('GlypherConstant_'+str(name), (cls,object,), new_dict)
for alts in alternatives :
alts = alternatives[alts]
if name in alts :
new_sym.constant_have_alternatives = True
new_sym.alternatives = alts
new_sym.altname = name
g.add_phrasegroup_by_class(name, new_sym, alt_cat=alternatives_cat)
return new_sym
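# new_from_entity builds a per-constant subclass on the fly via type(), marks
# it as having alternatives if its name appears in one of the groups above,
# and registers it with g.add_phrasegroup_by_class so it can be instantiated
# by name like any other phrasegroup.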
constants['planck'] = GlypherConstant_.new_from_symbol('planck', 'h',
physics.units.planck,
italicize=True,
cat="Physical Constants",
alternatives_cat='planck')
constants['hbar'] = GlypherConstant_.new_from_symbol( 'hbar', u'\u0127',
physics.units.hbar,
italicize=True,
cat="Physical Constants",
alternatives_cat='planck')
constants['speed_of_light'] = GlypherConstant_.new_from_symbol( 'speed_of_light',
'c',
physics.units.speed_of_light,
italicize=True,
cat="Physical Constants")
constants['G'] = GlypherConstant_.new_from_symbol( 'G', 'G',
physics.units.speed_of_light,
italicize=True,
cat="Physical Constants")
constants['gee'] = GlypherConstant_.new_from_symbol( 'gee', 'g',
physics.units.speed_of_light,
italicize=True,
cat="Physical Constants",
alternatives_cat="gram")
constants['e0'] = GlypherConstant_.new_from_symbol_sub( 'e0', 'e', '0',
physics.units.e0,
italicize=True,
cat="Physical Constants")
constants['u0'] = GlypherConstant_.new_from_symbol_sub( 'u0', u'\u03BC', '0',
physics.units.u0,
italicize=True,
cat="Physical Constants")
constants['Z0'] = GlypherConstant_.new_from_symbol_sub( 'Z0', 'Z', '0',
physics.units.Z0,
italicize=True,
cat="Physical Constants")
constants['exp1'] = GlypherConstant_.new_from_symbol( 'exp1',
'e',Exp1(),
italicize=False,
cat="Mathematical Constants")
constants['exponential_e'] = GlypherConstant_.new_from_symbol( 'exp1', 'e',
Exp1(),
italicize=False,
cat="Mathematical Constants")
constants['empty_set'] = GlypherConstant_.new_from_symbol(
'empty_set',
u'\u2205',
EmptySet(),
italicize=False,
cat="Sets")
#constants['complex'] = GlypherConstant_.new_from_symbol(
# 'Complex',
# u'\u2102',
# Exp1(),
# italicize=False)
# (u'\u2102', '\\Complex'), \
# (u'\u2124', '\\Integer'), \
# (u'\u2115', '\\Natural'), \
# (u'\u211A', '\\Rational'), \
# (u'\u211D', '\\Real'), \
constants['infinity'] = GlypherConstant_.new_from_symbol(
'infinity',
u'\u221e',
Infinity(),
italicize=False,
cat="Mathematical Constants")
constants['imaginary_unit'] = GlypherConstant_.new_from_symbol(
'imaginary_unit',
'i',
ImaginaryUnit(),
italicize=False,
cat="Complex")
constants['realR'] = GlypherConstant_.new_from_symbol(
'realR',
u'\u211D',
None, italicize=False,
cat="Sets")
constants['rationalQ'] = GlypherConstant_.new_from_symbol(
'rationalQ',
u'\u211A',
None, italicize=False,
cat="Sets")
constants['complexC'] = GlypherConstant_.new_from_symbol(
'complexC',
u'\u2102',
None, italicize=False,
cat="Sets")
units = {}
class GlypherUnit_(GlypherConstant_) :
value = None
def __init__(self, parent) :
GlypherConstant_.__init__(self, parent)
self.set_rgb_colour((0.5, 0.8, 0.6))
self.suspend_recommending()
def auto_make_unit(name, symbol, cat=None, alternatives_cat=None) :
units[name] = GlypherUnit_.new_from_symbol(name, symbol,
physics.units.__dict__[name],
cat=cat,
alternatives_cat=alternatives_cat)
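# auto_make_unit pulls the unit value out of sympy.physics.units by name, so
# a hypothetical call such as auto_make_unit('joule', 'J', cat="Units") would
# only work provided the installed sympy exposes a 'joule' attribute there.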
auto_make_unit('meter', 'm', cat="Units", alternatives_cat="meter")
auto_make_unit('gram', 'g', cat="Units", alternatives_cat="gram")
auto_make_unit('second', 's', cat="Units", alternatives_cat="second")
auto_make_unit('ampere', 'A', cat="Units")
auto_make_unit('kelvin', 'K', cat="Units")
auto_make_unit('mole', u'\u33d6', cat="Units")
auto_make_unit('candela', u'\u33c5', cat="Units")
auto_make_unit('liter', u'\u2113', cat="Units", alternatives_cat="liter")
auto_make_unit('hertz', u'\u3390', cat="Units")
auto_make_unit('newton', 'N', cat="Units")
auto_make_unit('millisecond', u'\u33b3', alternatives_cat='second')
auto_make_unit('microsecond', u'\u33b2', alternatives_cat='second')
auto_make_unit('nanosecond', u'\u33b1', alternatives_cat='second')
auto_make_unit('picosecond', u'\u33b0', alternatives_cat='second')
auto_make_unit('kilogram', u'\u338f', alternatives_cat='gram')
auto_make_unit('microgram', u'\u338d', alternatives_cat='gram')
auto_make_unit('milligram', u'\u338e', alternatives_cat='gram')
auto_make_unit('milliliter', u'\u3396', alternatives_cat='liter')
#auto_make_unit('decaliter', 'dl')
auto_make_unit('kilometer', u'\u339e', alternatives_cat='meter')
auto_make_unit('micron', u'\u339b', alternatives_cat='meter')
auto_make_unit('millimeter', u'\u339c', alternatives_cat='meter')
auto_make_unit('centimeter', u'\u339d', alternatives_cat='meter')
def make_word(string, parent, area = (0,0,0,0), auto_italicize=True) :
'''Make a Word from a string.'''
string = unicode(string)
word = GlypherWord(parent, area)
word.set_auto_italicize(auto_italicize)
for l in string :
word.append(GlypherSymbol(word, l))
return word
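# Hypothetical usage sketch: make_word(u'sin', parent) builds a GlypherWord
# containing three GlypherSymbols; with auto_italicize left True, the word's
# child_change() later decides the italic styling from its content.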
all_numbers = None
all_roman = None
def word_init() :
global all_numbers, all_roman
all_numbers = re.compile(g.is_floating_point_num_regex)
    all_roman = re.compile('[A-Za-z]+$')  # require every character to be a roman letter
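# Interpret is imported at the bottom of the module, presumably to avoid a
# circular import at load time; the thin wrapper below simply forwards to
# Interpret.interpret_sympy.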
import Interpret
def interpret_sympy(p, sy) :
return Interpret.interpret_sympy(p, sy)
g.add_phrasegroup_by_class('constant', GlypherConstant)

# === end of aesthete/glypher/Word.py ===
import traceback
import lxml.etree as ET
import copy
import sympy
import math
from aobject.utils import *
import glypher as g
import gutils
from sympy.core.sympify import SympifyError
ac = gutils.array_close
fc = gutils.float_close
def _ipu_font_size_combined_coeff(s) :
    '''Inherited-property updater for font_size_combined_coeff (fscc).'''
if s.get_parent() is None :
return 1.
par_fscc = s.get_parent().get_ip('font_size_combined_coeff')
par_fsc = s.get_parent().get_ip('font_size_coeff')
return par_fscc * par_fsc
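# For example, if an entity's parent resolves font_size_combined_coeff to 0.5
# and has font_size_coeff 0.8, the child's combined coefficient resolves to
# 0.4; an entity with no parent resolves to 1.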
class GlypherConfiguration :
'''
Each entity is split over one (normally) or more Configs. These allow
entities to be built of multiple bboxes, and not necessarily be rectangular.
Particularly useful for word-wrapping, where an entity is split into two
configs and its parent keeps one config on one row and moves the other to
the next.
'''
# Link to our parent
entity = None
# Who is the subsequent config (for our assoc. entity)?
next = None
# Track changes to our bounding box
bbox = None
old_bbox = None
invisible_bbox = None
sub_pos = None
old_sub_pos = None
row = None
old_row = None
col = None
old_col = None
# AXIOM OF HOPE:
# If that basebox hasn't changed,
# neither will the alignment of
# children (assuming that any
# change in the children will
# generate a realignment anyhow)
basebox = None
old_basebox = None
# The associated entity stores alignment info about the rows in the
# baseboxes of the first config, elsewhere they're redundant
#FIXME: shouldn't these be in the same object as the row bboxes?
baseboxes = None
old_baseboxes = None
index = 0
# Which of our parent's configs do we reside in?
parent_config_index = 0
width = 0
def __init__(self, obj, index) :
# We can initialize either from an Entity (which will subsequently need
# to have a recalc) or by copying another Config
if isinstance(obj, GlypherEntity) :
self.entity = obj
self.bbox = [0,0,0,0]
self.old_bbox = [0,0,0,0]
self.invisible_bbox = None
self.sub_pos = 0
self.old_sub_pos = 0
self.basebox = (0,0,0,0,0,0)
self.old_basebox = (0,0,0,0,0,0)
self.old_baseboxes = {}
self.row = 0
self.old_row = 0
self.col = 0
self.old_col = 0
if obj.am('phrase') :
self.baseboxes = {}
elif isinstance(obj, GlypherConfiguration) :
self.copy_new(obj)
self.old_bbox = list(obj.old_bbox)
self.old_basebox = obj.old_basebox
self.old_baseboxes = {}
self.set_baseboxes(self.old_baseboxes, obj.old_baseboxes)
self.old_sub_pos = obj.old_sub_pos
self.old_row = obj.old_row
self.old_col = obj.old_col
else :
raise(RuntimeError("!!! wrong constructor type for GlypherConfiguration"))
# Add ourselves to our entity as config 'index'
self.entity.config[index] = self
self.index = index
def get_entity(self) :
'''Return associated entity'''
return self.entity
def to_string(self) :
'''
If we are associated with a phrase, we may have child configs - join
them to make a string, otherwise return the string rep of our associated
entity.
'''
if self.get_entity().am('phrase') :
return "".join([a.to_string() for a in self.child_configs])
return self.get_entity().to_string()
_full_changes = False
def get_changes(self) :
'''
Produce a string representation of the geometrical changes since the
last time this was reset.
'''
chgs = ""
if not ac(self.bbox, self.old_bbox) :
chgs += str(self.old_bbox)+" -> "+str(self.bbox)+"\n"
if self.sub_pos != self.old_sub_pos :
chgs += ":" + str(self.old_sub_pos)+" -> "+str(self.sub_pos)+"\n"
if self.row != self.old_row :
chgs += "---"+str(self.old_row)+" -> "+str(self.row)+"\n"
if self.col != self.old_col :
chgs += "|||"+str(self.old_col) + " -> "+str(self.col)+"\n"
if not ac(self.basebox, self.old_basebox) :
chgs += " basebox: "+str(self.old_basebox)+\
" -> "+str(self.basebox)+"\n"
short = (chgs != "" and not self._full_changes)
n = 0
if self.baseboxes :
for r in self.baseboxes :
if r not in self.old_baseboxes :
if short :
n += 1
else :
chgs += " no old basebox for row "+str(r)+"\n"
elif self.baseboxes[r] != self.old_baseboxes[r] :
if short :
n += 1
else :
chgs +=" basebox["+str(r)+"]: "+str(self.old_baseboxes[r])+\
" -> " + str(self.baseboxes[r]) + "\n"
if n > 0 :
chgs += " & "+str(n)+" of "+str(len(self.baseboxes))+" baseboxes\n"
chgs = chgs[:-1]
return chgs
def scale_baseboxes(self, s) :
'''
Rescale the baseboxes by a factor of s with respect to the bottom left
corner of the bbox (in local space, origin for rows)
'''
# Scale the master alignment basebox
self.basebox = self._scale_basebox(self.basebox,
self.bbox[0], self.bbox[3], s)
# If we're in local space, scale internal baseboxes wrt the origin
if self.entity.get_local_space() :
l, b = (0, 0)
else :
l, b = (self.bbox[0], self.bbox[3])
# Scale internal (row) baseboxes
if self.baseboxes :
for r in self.baseboxes :
self.baseboxes[r] = self._scale_basebox(self.baseboxes[r],
l, b, s)
def _scale_basebox(self, B, l, b, s) :
'''
        The actual mechanics of scaling a given basebox about the anchor (l, b)
'''
return (l+(B[0]-l)*s, l+(B[1]-l)*s, l+(B[2]-l)*s,
b+(B[3]-b)*s, b+(B[4]-b)*s, b+(B[5]-b)*s)
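    # Each coordinate is scaled affinely about the anchor: for example, with
    # l = 4 and s = 0.5, a horizontal coordinate of 10 maps to
    # 4 + (10 - 4)*0.5 = 7.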
def move_baseboxes(self, h, v) :
'''
Translate the baseboxes
'''
# Master alignment is simple translate
self.basebox = self._move_basebox(self.basebox, h, v)
# If we're not in local space, we'll need to move internal ones too
if self.baseboxes and not self.entity.get_local_space() :
for r in self.baseboxes :
self.baseboxes[r] = self._move_basebox(self.baseboxes[r], h, v)
def _move_basebox(self, b, h, v) :
'''
        The actual mechanics of translating a given basebox by (h, v)
'''
return (b[0]+h, b[1]+h, b[2]+h, b[3]+v, b[4]+v, b[5]+v)
def get_row(self) :
'''What row are we?'''
return self.row
def get_col(self) :
'''What col are we?'''
return self.col
def get_bbox(self) :
'''Return the bbox'''
return tuple(self.bbox)
def get_sub_pos(self) :
'''Return the config's subpos in parent'''
return self.sub_pos
def get_basebox(self) :
'''Return the master alignment bbox'''
return self.basebox
def get_baseboxes(self) :
'''Return the internal alignment baseboxes'''
return self.baseboxes
def set_sub_pos(self, sub_pos) :
'''Set the config's subpos in parent'''
self.sub_pos = sub_pos
def set_bbox(self, bbox) :
'''Copy the bbox values to our bbox'''
for i in range(0,4) :
self.bbox[i] = bbox[i]
self.width = self.bbox[2] - self.bbox[0]
def update_basebox(self, only_recalc_self=False) :
'''Get the entity to recalc our basebox.'''
self.entity.recalc_basebox(config=self)
if self.entity.am('phrase') and not self.baseboxes :
self.baseboxes = {}
def check(self) :
'''Checks for basic geometric changes.'''
if not ac(self.bbox, self.old_bbox) or \
self.row != self.old_row or \
self.col != self.old_col or \
not ac(self.basebox, self.old_basebox) :
return True
return False
def copy_new(self, config) :
'''
Copy the members of config into our structure (deep)
'''
self.entity = config.entity
self.bbox = list(config.bbox)
self.basebox = config.basebox # this should always be a tup
if config.baseboxes :
self.baseboxes = {}
self.set_baseboxes(self.baseboxes, config.baseboxes)
self.sub_pos = config.sub_pos
self.row = config.row
self.col = config.col
def reset(self, only_recalc_self = False) :
'''
Reharmonize the old and current members, e.g. bbox->old_bbox. Note that
once this is called, the config will show no changes have occurred until
the next change.
'''
self.old_bbox = list(self.bbox)
self.old_sub_pos = self.sub_pos
self.old_row = self.row
self.old_col = self.col
self.old_basebox = self.basebox
if self.baseboxes :
self.set_baseboxes(self.old_baseboxes, self.baseboxes)
def set_baseboxes(self, baseboxes1, baseboxes2) :
'''
Copy a series of (tuple) baseboxes from bb2 to bb1
'''
for bsb in baseboxes2 :
if bsb not in baseboxes1 or baseboxes1[bsb] != baseboxes2[bsb] :
baseboxes1[bsb] = tuple(baseboxes2[bsb])
class GlypherEntity :
'''
Basic class for all Glypher components that can be combined into phrases,
etc.
'''
@staticmethod
def xml_copy(parent, ent) :
'''Create a new entity from old via XML'''
# Extract relevant XML
ent_xml = ET.ElementTree(ent.get_xml(\
targets={}, top=False, full=False))
# Build a new entity
#FIXME: parent!=None breaks scaling (e.g. responses list) with bold
# words in functions. WEIRD.
new_ent = parse_phrasegroup(parent, ent_xml, top=False)
return new_ent
parent = None
# For compat. with Phrase - None unless overridden
entities = None
# Used for the Caret if it wants to obtain a suggestion from this entity
recommending = None
# Set of names indicating what kind of entity I am
#FIXME: couldn't this just be replaced with isinstance ?
mes = None
# Gap around the entity
padding = None
# Toolbox info
toolbox = None
# Property dictionary for the entity
properties = None
# Default properties - used if not set in 'properties'
default_properties = None
# Which inherited properties does this entity override by default?
default_inherited_properties_overrides = None
# (+Inherited) Properties which shouldn't appear in the XML
hidden_properties = None
# Edit mode can be used to override attachability/enterability(?) settings
# temporarily, without actually changing them
edit_mode = False
# Indicates cache invalidated
redraw_required = False
# Dictionary of component configs
config = None
# Is this entity selected?
selected = False
error = False
_error_text = None
def set_error_note(self, text = None) :
        '''Indicate that an error has occurred (pass None to clear the flag).'''
if text is None :
self.error = False
elif text != self._error_text :
self.error = True
self._error_text = text
# Return a version of this entity for use as a symbol
def _get_symbol_string_real(self) :
'''
Return version for use as a symbol
        Override this one; the non-_real variant handles substitution
'''
me_str = self.to_string().replace(' ', '_')
return me_str
def _get_symbol_string(self, sub = True) :
'''
Call this to get a string for a sympy symbol from this entity
'''
me_str = str(self._get_symbol_string_real())
if me_str in g.sympy_specials :
return g.sympy_specials[self.to_string()]
return me_str
def clear_all_errors(self) :
'''Hide any error message.'''
self.error = False
def get_selected(self) :
'''Whether this entity is selected (!= in a selection)'''
return self.selected
def set_selected(self, selected) :
'''Set whether this entity is selected (!= in a selection)'''
self.selected = selected
def in_selected(self) :
'''Whether this entity is in a selection'''
if self.included() :
return self.get_selected() or self.get_parent().in_selected()
return self.get_selected()
def set_p(self, name, val) :
'''Set a property'''
if self.get_p(name) == val :
return
self.properties_changes[name] = self.properties_changes.get(name, 0) + 1
self.properties[name] = val
def get_p(self, name) :
'''Get a property'''
if name in self.properties :
return self.properties[name]
elif name in self.default_properties :
return self.default_properties[name]
return None
def set_bodmas_level(self, bodmas_level) :
'''Set BODMAS level'''
self.set_p('bodmas_level', bodmas_level)
def get_bodmas_level(self) :
'''Get BODMAS level'''
return self.get_p('bodmas_level')
#FIXME: remove cousin on entity deletion
def add_cousin(self, cousin) :
'''
A cousin is an entity that needs informed of a size change, but isn't
in the parent chain, e.g. a mirror entity
'''
self.cousins.append(cousin)
#FIXME: Should this be moved to Phrase?
def get_local_space(self) :
'''Is this entity evaluating internally in local space?'''
return self.get_p('local_space')
def get_name(self) :
'''Get string name for this entity'''
return self.get_p('name')
def set_name(self, name) :
        '''Set the string name for this entity (appears in XML and some debug output)'''
return self.set_p('name', name)
def get_align(self) :
'''Which direction should we align to parent?'''
return self.get_p('align')
def set_align(self, align) :
'''Which direction should we align to parent?'''
return self.set_p('align', align)
def get_visible(self) :
'''Are we visible?'''
return self.get_p('visible')
def set_visible(self, visible) :
'''Are we visible?'''
return self.set_p('visible', visible)
def get_blank(self) :
'''Do we skip draw? Unlike 'visible', this preserves space'''
return self.get_p('blank')
def set_blank(self, blank) :
'''Do we skip draw? Unlike 'visible', this preserves space'''
return self.set_p('blank', blank)
def get_breakable(self) :
'''Can this entity be broken into smaller configs?'''
return self.get_p('breakable')
def set_breakable(self, breakable) :
'''Can this entity be broken into smaller configs?'''
ret = self.set_p('breakable', breakable)
return ret
def get_attachable(self) :
'''Can this entity be attached to by the Caret?'''
return self.get_p('attachable')
def set_attachable(self, attachable, children_too=False) :
'''Can this entity be attached to by the Caret?'''
if self.am('phrase') and children_too :
for ent in self.get_entities() :
ent.set_attachable(attachable, children_too=True)
return self.set_p('attachable', attachable)
_IN = None
_OUT = None
def get_IN(self) :
'''
Should high-level inward facing actions be given to another entity
representing our interior?
'''
return self._IN
def set_IN(self, IN) :
'''
Should high-level inward facing actions be given to another entity
representing our interior?
'''
self._IN = IN
def get_OUT(self) :
'''
Should high-level outward facing actions be given to another entity
representing our exterior?
'''
return self._OUT
def set_OUT(self, OUT) :
'''
Should high-level outward facing actions be given to another entity
representing our exterior?
'''
self._OUT = OUT
def get_have_alternatives(self) :
'''
Do we have alternatives to switch through?
'''
return self.get_p('have_alternatives')
def set_have_alternatives(self, have_alternatives) :
'''
Do we have alternatives to switch through?
'''
return self.set_p('have_alternatives', have_alternatives)
def get_always_recalc(self) :
'''
Should we always recalc when our parent does?
'''
return self.get_p('always_recalc')
def set_always_recalc(self, always_recalc) :
'''
Should we always recalc when our parent does?
'''
return self.set_p('always_recalc', always_recalc)
def get_runted(self) :
'''
Do we have a parent that does not own us? That is, we inherit
properties, etc. but are not in the entities list
'''
return self.runted
def set_runted(self, runted) :
'''
Do we have a parent that does not own us? That is, we inherit
properties, etc. but are not in the entities list
'''
self.runted = runted
attached = False
def get_attached(self) :
'''
Are we the entity currently attached to the Caret
'''
return self.attached
def set_attached(self, attached) :
'''
Are we the entity currently attached to the Caret
'''
self.attached = attached
def get_vertical_ignore(self) :
'''
Should we ignore this entity when calculating vertical bbox/basebox
extents in the parent?
'''
return self.get_p('vertical_ignore')
def set_vertical_ignore(self, vertical_ignore) :
'''
Should we ignore this entity when calculating vertical bbox/basebox
extents in the parent?
'''
return self.set_p('vertical_ignore', vertical_ignore)
def get_horizontal_ignore(self) :
'''
Should we ignore this entity when calculating horizontal bbox/basebox
extents in the parent?
'''
return self.get_p('horizontal_ignore')
def set_horizontal_ignore(self, horizontal_ignore) :
'''
Should we ignore this entity when calculating horizontal bbox/basebox
extents in the parent?
'''
return self.set_p('horizontal_ignore', horizontal_ignore)
def get_enterable(self) :
'''
Can the Caret attach inside us (possibly to a child)?
'''
return self.IN().get_p('enterable')
def set_enterable(self, enterable, children_too=False) :
'''
Can the Caret attach inside us (possibly to a child)?
'''
if self.am('phrase') and children_too :
for ent in self.get_entities() :
ent.set_enterable(enterable, children_too=True)
return self.IN().set_p('enterable', enterable)
target_name = None
def get_xml(self, name=None, top=True, targets=None, full=False) :
'''
Return an XML representation of this entity
'''
if name is None :
name = self.get_name()
root = ET.Element(name)
# The final me in mes is the 'type'
root.set('type', self.mes[-1])
# If we have been asked to give a target name, do so.
if self.target_name is not None :
root.set('target', self.target_name)
self.bind_xml(root, top=top)
return root
def _xml_add_property(self, root, val) :
'''
Turn a property value into XML attributes of root
'''
if isinstance(val, tuple) :
root.set('type', type(val).__name__)
for t in val :
te = ET.SubElement(root, 'ti')
te.set('value', str(t))
te.set('type', type(t).__name__)
elif isinstance(val, GlypherEntity) :
root.set('type', 'entity')
root.set('value', val.get_name())
else :
root.set('value', str(val))
root.set('type', type(val).__name__)
def bind_xml(self, root, top=True) :
'''
Attach XML properties of this entity to root
'''
inherited_props_overrides = ET.Element('inherited_properties_overrides')
props = ET.Element('properties')
# We need extra information if we're at the top (top is more like
# saying, provide all information and don't assume we've heard of this
# type of entity)
if top :
mes = ET.Element('mes')
inherited_props = ET.Element('inherited_properties')
chs = ET.Element('characteristics')
# Make sure all our types are provided
for p in self.mes :
me = ET.SubElement(mes, 'me')
me.set('name', p)
# Add in any inherited_properties (at top, so none to be inherited
# above)
for p in self.inherited_properties :
if self.inherited_properties[p] is None :
continue
inherited_prop = ET.SubElement(inherited_props, 'ip')
inherited_prop.set('name', p)
self._xml_add_property(inherited_prop,
self.inherited_properties[p])
# Add in any characteristics - flags indicating particular
# properties
for p in self.characteristics :
ch = ET.SubElement(chs, 'ch')
ch.set('name', p)
# Only append these if necessary
if len(mes) > 0 :
root.append(mes)
if len(chs) > 0 :
root.append(chs)
if len(inherited_props) > 0 :
root.append(inherited_props)
# Add any category information
if self.toolbox is not None :
root.set('symbol', self.toolbox['symbol'])
root.set('category', self.toolbox['category'])
if self.toolbox['alternatives'] is not None :
root.set('alternatives', self.toolbox['alternatives'])
if self.toolbox['priority'] is not None :
root.set('priority', self.toolbox['priority'])
if self.toolbox['shortcut'] is not None :
root.set('shortcut', self.toolbox['shortcut'])
# Deal with any overrides of inherited properties
for p in self.inherited_properties_overrides :
# Certain situations where we still don't want to include XML for
# this
if self.inherited_properties_overrides[p] is None or \
p in self.hidden_properties or \
(p in self.default_inherited_properties_overrides and \
self.inherited_properties_overrides[p] ==\
self.default_inherited_properties_overrides[p]) :
continue
inherited_prop = ET.SubElement(inherited_props_overrides, 'ipo')
inherited_prop.set('name', p)
self._xml_add_property(inherited_prop,
self.inherited_properties_overrides[p])
if len(inherited_props_overrides) > 0 :
root.append(inherited_props_overrides)
# Deal with our own properties
for p in self.properties :
if self.properties[p] is None or \
p in self.hidden_properties or \
(p in self.default_properties and \
self.properties[p] == self.default_properties[p]) :
continue
prop = ET.SubElement(props, 'property')
prop.set('name', p)
self._xml_add_property(prop, self.properties[p])
if len(props) > 0 :
root.append(props)
        # Scaling is common enough that it is tidier to include it as a tag
        # attribute rather than as a default inherited-property override
if self.get_size_scaling() != \
self.default_inherited_properties_overrides['font_size_coeff'] :
root.set('scaling', str(self.get_size_scaling()))
# The padding is most efficiently included thusly
padd_string = ",".join(map(str,self.padding))
if padd_string != "0.0,0.0,0.0,0.0" :
root.set('padding', padd_string)
# Use with caution
#FIXME: don't use at all.
draw_offset = (0,0)
def IN(self) :
'''
Define the real object with which you should be dealing. Useful for
        compound phrases where only one component is publicly usable. Interior
component.
'''
if self.get_IN() is None :
return self
else :
return self.get_IN().IN()
def OUT(self) :
'''
Define the real object with which you should be dealing. Useful for
        compound phrases where only one component is publicly usable. Exterior
component.
'''
if self.get_OUT() is None :
return self
else :
return self.get_OUT().OUT()
def get_up(self) :
'''
Return our parent (or exterior's parent)
'''
return self.OUT().get_parent()
    def recalc_basebox(self, config=None) :
        '''
        Recalculate our alignment basebox for each cfg. The optional config
        argument matches the keyword passed by
        GlypherConfiguration.update_basebox; this base implementation ignores
        it and recalculates every config.
        '''
for c in self.config :
cfg = self.config[c]
b = cfg.get_bbox()
cfg.basebox = (b[0], (b[0]+b[2])*0.5, b[2],
b[1], (b[1]+b[3])*0.5, b[3])
def set_padding_all(self, quant) :
'''
Set the padding the whole way around to a single value
'''
for i in range(0,4):
self.set_padding(i, quant, quiet=True)
self.recalc_bbox()
def set_padding(self, ind, quant, quiet = False) :
'''
Set padding of an individual side
'''
if quant == self.padding[ind] :
return
self.padding[ind] = quant
if not quiet :
self.recalc_bbox()
def is_edit_mode(self) :
'''
Whether we temporarily override attachability (and enterability?)
restrictions.
'''
ret = self.edit_mode or \
(self.included() and self.parent.is_edit_mode())
#debug_print((self.format_me(), ret))
return ret
def is_attachable(self) :
'''
Can we attach to this entity?
'''
val = (self.get_attachable() or self.is_edit_mode()) and \
self.get_visible() and not self.get_blank()
return val
def get_pow_options(self) :
'''
What ways can we raise this to a power? e.g. how do we execute this^3 in
a glypher expression
'''
return ('python',)
def get_caret_position(self, inside=False, pos=None) :
'''
Supposing the caret is attached to us, where should it display? Use
global coordinates
'''
if inside and self != self.IN() :
return self.IN().get_caret_position(inside=True, pos=pos)
if pos is None :
if inside :
l = self.config[0].get_basebox()[0] + self.padding[0]
else :
l = self.config[0].get_basebox()[2] - self.padding[2]
pos = [l, self.config[0].get_basebox()[5] - self.padding[3]]
if self.parent :
local_off = self.parent.get_local_offset()
pos = list(pos)
pos[0] += local_off[0]
pos[1] += local_off[1]
return pos
def blank(self) :
'''
Make this entity transparent.
'''
self.set_blank(True)
def unblank(self) :
'''
Unmake this entity transparent.
'''
self.set_blank(False)
def get_recommending(self) :
'''
Who do we recommend the caret attaches to if it asks?
'''
return self.recommending
_suspend_recommending = False
def suspend_recommending(self) :
'''
Ignore future requests for us to store new recommendations for caret
location
'''
self._suspend_recommending = True
def resume_recommending(self) :
'''
Resume acceptance of requests for us to store new recommendations for
caret location
'''
self._suspend_recommending = False
def set_recommending(self, recommending) :
'''
Set our recommendation for caret location in case it wants a suggestion
at some point.
'''
if self._suspend_recommending :
return
self.recommending = recommending
if self.included() :
self.get_parent().set_recommending(recommending)
def copy(self) :
'''
.. deprecated:: 0.2
Create a deep copy of this entity.
'''
        # Temporarily remove links that would otherwise drag parent phrases
        # into the copy
p = self.parent
self.set_parent(None)
self.set_OUT(None)
self.cairo_cache_context = None
self.cairo_cache_image_surface = None
#cop = copy.deepcopy(self)
cop = GlypherEntity.xml_copy(None, self)
if cop.am('phrase') :
cop.config_collapse()
self.set_parent(p)
return cop
def get_main_phrase_property(self, prop) :
'''
Convenience routine to get a property from the main phrase rather than
the present entity.
'''
mp = self.get_main_phrase()
if mp is not None :
return mp.get_p(prop)
return None
def added(self) :
'''
Called once we've been added to a Phrase. This is a good place to put
process heavy stuff that we don't want to check every recalc, like
material changes (e.g. make_simplifications)
'''
if self.included() :
self.make_simplifications()
def make_simplifications(self) :
'''
Override to perform simplification checks, where a certain combination
of children, juxtaposition of child with parent, etc. may lead us to
want to materially change something. As an example, GlypherAdd checks
for 1 + (a+b) [nested sum] and simplifies it to 1 + a + b. Be
particularly careful of introducing infinite recursion.
'''
pass
# This is for finding PhraseGroups only so ignores entities (and phrases)
def inside_a(self, what, ignore=()) :
'''
Check upwards to see if we're in a 'what'. 'ignore' tells us what type
of *PhraseGroup* we're going to ignore. If we meet a PG that *isn't* one
of these, or we reach the top parent, we give up and declare defeat.
'''
if self.included() :
return self.get_parent().inside_a(what, ignore)
return None
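    # The base implementation only walks up the parent chain; the actual type
    # matching (and the ignore logic described in the docstring) is presumably
    # supplied by the PhraseGroup override of inside_a.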
x_offset = 0
y_offset = 0
def get_anchor(self, bbox = None) :
'''
Provides a nominal anchor for the entity (actually used less than you'd
think).
'''
if bbox == None :
bbox = self.config[0].get_bbox()
pos = [0,0]
al = self.get_align()
if al[0] == 'l' :
pos[0] = bbox[0]
elif al[0] == 'c' :
pos[0] = (bbox[0]+bbox[2])/2
else :
pos[0] = bbox[2]
if al[1] == 'b' :
pos[1] = bbox[3]
elif al[1] == 'm' :
pos[1] = (bbox[1]+bbox[3])/2
else :
pos[1] = bbox[1]
return pos
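    # For example, with align ('l','m') the anchor is the left edge at the
    # vertical midpoint of the bbox, while ('c','b') gives the horizontal
    # centre of the bottom edge.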
def get_local_offset(self) :
'''
Gets the 2-vec to add to internal coordinates to give global
coordinates. If neither we nor any of our parents are in local space,
this is just a zero vector.
'''
local_off = [0,0]
# Account for our own local space
if self.get_local_space() :
local_off = [self.config[0].bbox[0], self.config[0].bbox[3]]
# Request upwards
if self.parent :
local_up = self.parent.get_local_offset()
local_off[0] += local_up[0]
local_off[1] += local_up[1]
return tuple(local_off)
def draw_topbaseline(self, cr, shade=(1.0,1.0,1.0), force=False) :
'''
Draws our alignment guidelines onto cr (under bbox_mode)
'''
if not g.bbox_mode and not force :
return
cr.save()
cr.move_to(self.config[0].bbox[0], self.get_baseline())
cr.line_to(self.config[0].bbox[2], self.get_baseline())
cr.move_to(self.config[0].bbox[0], self.get_topline())
cr.line_to(self.config[0].bbox[2], self.get_topline())
col = [len(self.get_ancestors())*0.2]*3
        col = [col[i]*shade[i] for i in range(0,3)]+[1.0]  # shade all channels, append alpha
cr.set_source_rgba(*col)
cr.stroke()
        abox = list(self.config[0].get_bbox())  # get_bbox() returns a tuple; copy so we can adjust it
abox[2] -= abox[0]
abox[3] -= abox[1]
cr.rectangle(*abox)
col = [len(self.get_ancestors())*0.1]*3
        col = [col[i]*shade[i] for i in range(0,3)]+[1.0]
cr.set_source_rgba(*col)
cr.stroke()
col = [len(self.get_ancestors())*0.1]*3
        col = [col[i]*shade[i] for i in range(0,3)]+[1.0]
col[2] = 0
cr.set_source_rgba(*col)
anchor = self.get_anchor()
cr.move_to(anchor[0], anchor[1]+3)
cr.arc(anchor[0], anchor[1], 3, 0, math.pi*2)
cr.fill()
cr.restore()
def get_topline(self) :
'''
Get a topline alignment for this entity (bbox top unless overridden)
'''
bbox = self.config[0].get_bbox()
return bbox[1]
def get_baseline(self) :
'''
Get a baseline alignment for this entity (bbox bottom unless overridden)
'''
bbox = self.config[0].get_bbox()
return bbox[3]
def realign(self, quiet=False) :
'''
Override to do some realignment
'''
return
def set_in(self, innermost) :
'''
Set an innermost inside (you're more likely to want this than
        :py:meth:`set_IN`).
'''
innermost.set_OUT(self.IN())
self.IN().set_IN(innermost)
def get_deletable(self) :
'''
Can this entity be deleted?
'''
return self.get_p('deletable')
def set_deletable(self, deletable) :
'''
Can this entity be deleted?
'''
return self.set_p('deletable', deletable)
def get_parent(self) :
'''
Who is our parent?
'''
return self.parent
ref_width = 0
def get_ref_width(self) :
'''
What width are we? Not so useful for phrases, as they calc themselves,
but this should be updated before standard entity recalcs.
'''
return self.ref_width
def set_ref_width(self, ref_width) :
'''
What width are we? Not so useful for phrases, as they calc themselves,
but this should be updated before standard entity recalcs.
'''
self.ref_width = ref_width
ref_height = 0
def get_ref_height(self) :
'''
What height are we? Not so useful for phrases, as they calc themselves,
but this should be updated before standard entity recalcs.
'''
return self.ref_height
def set_ref_height(self, ref_height) :
'''
What height are we? Not so useful for phrases, as they calc themselves,
but this should be updated before standard entity recalcs.
'''
self.ref_height = ref_height
def add_properties(self, supp_dict, supp_ipo_dict=None) :
'''
Add new properties to the element (and possibly inherited properties)
setting the given values as defaults (i.e. won't be included in XML if
unchanged). If you include an already existing property, it updates the
default value to whatever you've given.
'''
self.default_properties.update(supp_dict)
if supp_ipo_dict is not None :
self.default_inherited_properties_overrides.update(supp_ipo_dict)
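    # For example, GlypherWord.__init__ registers its defaults with
    # self.add_properties({'auto_italicize' : True, 'wordlike' : True}).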
def is_wordlike(self) :
'''Should this be treated, in simplifications, etc. as a Word?'''
return self.get_p('wordlike')
def to_latex(self) :
'''
Provide a LaTeX representation. Note that this doesn't get called by
Word, so won't be used often without being overridden.
'''
me_str = self.to_string()
if me_str in g.interpretations :
me_str = g.interpretations[me_str]["latex"]
return str(me_str)
def __init__(self, parent = None) :
self.properties_changes = {}
self.default_properties = \
{'align' : ('l','m'),
'always_recalc' : False,
'auto_contract_premultiplication' : True,
'attachable' : True,
'visible' : True,
'blank' : False,
'horizontal_ignore' : False,
'override_keys' : False,
'local_space' : False,
'wordlike' : False,
'breakable' : False,
'deletable' : 1,
'have_alternatives' : False,
'force_no_bracket' : False}
self.properties = {}
# Font size coeff is complicated...
self.hidden_properties = [ 'font_size_coeff' ]
        # Make up a provisional unique name. This may be overridden in
        # subclass __init__s
self.set_name('entity_'+str(id(self)))
# Whatever else I am, I'm an entity
self.mes = []
self.mes.append('entity')
# Entities that need to be kept in the loop about size changes
self.cousins = []
self.inherited_properties = {
'bold' : False,
'font_size' : 45.0,
'rgb_colour' : (0.0,0.0,0.0),
'font_name' : None,
'font_size_combined_coeff' : 1.0,
'font_size_coeff' : 1.0 }
self.inherited_properties_overrides = {}
self.default_inherited_properties_overrides = {
'font_size_coeff' : 1.0 }
self.inherited_properties_updaters = {
'font_size_combined_coeff': _ipu_font_size_combined_coeff }
# If the weight changes, we may change size
#NB: we don't recalc for a font size change as that should be all
# handled by scaling functions. In other words, don't change font_size*
# directly!!
self.inherited_properties_recalc_on_change = [
'bold' ]
self.inherited_properties_change = {}
self.config = {}
self.set_OUT(None)
self.set_IN(None)
self.characteristics = []
# If the caret wants to know who we think it should attach to, tell it
# to attach to us
self.set_recommending(self)
self.padding = [0., 0., 0., 0.]
# This adds a cfg to us
GlypherConfiguration(self, 0)
self.recalc_basebox()
self.config[0].reset()
# Use for properties but don't communicate
# until we're told to
self.set_parent(parent, runted=True)
def who_am_i(self) :
'''String expression of the mes'''
return "; ".join(self.mes)
def config_reset(self) :
'''
Tell all the configs to reset their old_bboxes (etc) to the current
versions (as we claim we've done any necessary consequent rejigging).
'''
for c in self.config :
self.config[c].reset()
def config_check(self) :
'''
Check whether any of the configs have changed since last reset.
'''
chk = False
for c in self.config :
chk = chk or self.config[c].check()
return chk
def feed_up(self, quiet=False, in_feed_chain=False) :
'''
Inform parents that we have changed and to adjust accordingly, then we
reset our configs and raise the redraw required flag
'''
do_feed_up = not quiet and \
(self.config_check() or g.anal_retentive_mode2) and \
self.included()
if do_feed_up :
self.get_parent().child_bbox_change(self)
self.config_reset()
if not in_feed_chain : self.child_altered(self)
# Make sure the cousins know of any size changes
for cousin in self.cousins :
cousin.recalc_bbox()
else :
self.config_reset()
self.set_redraw_required()
def set_redraw_required(self) :
'''
Invalidate any caching and require a redraw (feeds up)
'''
self.redraw_required = True
if self.included() :
self.parent.set_redraw_required()
def child_altered(self, child=None) :
'''
A child ('child') has changed. Tell parent (override to do more)
'''
if self.included() :
self.get_parent().child_altered(self)
def show(self, quiet=False) :
'''
Makes this entity visible again; as invisibility involves zero width,
this also makes the necessary bbox/basebox alterations and feeds up.
'''
if self.get_visible() :
return
self.set_visible(True)
for c in self.config :
cfg = self.config[c]
cfg.bbox[2] = cfg.bbox[0] + cfg.width
self.recalc_basebox()
self.feed_up(quiet=quiet)
def hide(self, quiet=False) :
'''
Makes this entity invisible, which involves zeroing the width, recalcing
the basebox and feeding up
'''
if not self.get_visible() :
return
self.set_visible(False)
for c in self.config :
self.config[c].bbox[2] = self.config[c].bbox[0]
self.recalc_basebox()
self.feed_up(quiet=quiet)
def format_loc(self) :
'''Return a string expression of the location.'''
return str(self.config[0].get_bbox())+':'+\
str(self.config[0].get_sub_pos())+'|'+\
str(self.config[0].get_row())+','+str(self.config[0].get_col())
def format_old_loc(self) :
'''Return a string expression of the previous location.'''
return str(self.config[0].old_bbox)+':'+\
str(self.config[0].old_sub_pos)+'|('+\
str(self.config[0].old_row)+','+str(self.config[0].old_col)+')'
def recalc_bbox(self, quiet=False) :
'''Recalculate the bounding box (from ref_width & ref_height)'''
config = self.config[0]
pad = self.padding
config.bbox[2] = config.bbox[0] + self.get_ref_width() +(pad[0]+pad[2])
config.bbox[1] = config.bbox[3] - self.get_ref_height() -(pad[1]+pad[3])
config.width = config.bbox[2]-config.bbox[0]
# If we moved the left side, we may need to get a new sub pos
if not fc(config.bbox[0],config.old_bbox[0]) and self.included() :
config.set_sub_pos(\
self.get_parent().get_free_sub_pos(config,
search_dir_fwd=(config.bbox[0]<config.old_bbox[0])))
# Account for (in)visibility
if not self.get_visible() :
config.bbox[2] = config.bbox[0]
# Update alignment
self.recalc_basebox()
change = config.check()
self.feed_up(quiet=quiet)
return change
def format_me(self) :
'''Return a string expression of self.'''
name = unicode('')
if self.get_name() is not None :
name = unicode(self.get_name()+':')
return u' \u25aa '.join([name+self.to_string(), self.who_am_i(),
'-'.join([o.mes[len(o.mes)-1] for o in self.get_ancestors()]),
str(self.get_scaled_font_size()),
self.format_loc()])
def get_main_phrase(self) :
'''Return overarching main phrase.'''
anc = self.get_ancestors()
top = anc[len(anc)-1]
if top.am('main_phrase') :
return top
return None
def get_ancestors(self) :
'''Return a list of ancestors, beginning with self.'''
anc = [self]
if self.get_parent() :
anc += self.get_parent().get_ancestors()
return anc
def set_parent(self, parent, runted=False) :
'''
Set parent as our parent; normally this shouldn't be called directly
but by append or by constructor. Can be useful for causing an entity to
take on properties of a parent without actually adding it (runted w.
parent)
'''
if parent == self :
raise StandardError("Can't make entity its own parent")
elif parent == None :
self.parent = None
self.set_runted(True)
else :
self.parent = parent
self.set_runted(runted)
# Tell this entity and all its parents that one of their children has
# changed (unless runted is True)
self.children_check()
def to_string(self, mode="string") :
'''
Return a *UNICODE* representation of this entity. Empty string unless
overridden.
'''
return unicode('')
def width(self) :
'''Width of bbox of first config.'''
return self.config[0].bbox[2] - self.config[0].bbox[0]
def height(self) :
'''Height of bbox of first config.'''
return self.config[0].bbox[3] - self.config[0].bbox[1]
def orphan(self) :
'''Remove from parent.'''
if self.included() :
self.get_parent().remove(self)
def included(self) :
'''
Is this a child entity and are we included in the parent (i.e. not
runted)?
'''
return self.get_parent() is not None and not self.get_runted()
def delete(self, if_empty=False) :
'''
Cleanly remove this entity from its environment. Returns its old parent
if one exists. Note that deletability is a property and not guaranteed.
'''
if self != self.OUT() :
return self.OUT().delete()
old_parent = None
if self.included() :
old_parent = self.get_parent()
# If we are deletable and should orphan
if self.get_deletable() == 1 or self.get_deletable() == 3 :
self.squash()
self.orphan()
# If we are supposed to pass this delete request to our parent
if (self.get_deletable() == 2 or self.get_deletable() == 3) and \
old_parent is not None :
old_parent = old_parent.delete(sender=self)
return old_parent
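# Illustrative summary of the 'deletable' codes handled above ('ent' is a
# hypothetical included entity):
#
#     ent.set_deletable(1)   # orphan ourselves on delete (the default)
#     ent.set_deletable(2)   # don't orphan; pass the delete up to the parent
#     ent.set_deletable(3)   # orphan ourselves *and* pass the delete upwards
#     old_parent = ent.delete()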
# Ignores visibility setting if no parent (as not being included in a \
# longer string, so we must be being asked for a good reason!)
def string_compile(self, body, mode="string") :
'''Return body depending on visibility.'''
return body if (self.get_visible() or not self.included()) else ''
def am(self, what) :
'''Is 'what' in mes?'''
return what in self.mes
def am_c(self, what) :
'''Is 'what' in characteristics?'''
return what in self.characteristics
def get_line_size(self) :
'''What is the master font size (not inc. scalings)'''
return self.get_ip('font_size')
def get_scaled_line_size(self) :
'''What is the scaled font size? Probably what you're looking for!'''
fs = self.get_line_size() * self.get_line_size_coeff()
return fs
def get_line_size_coeff(self) :
'''
What is the scaling factor for font size? That is, what do we
multiply the font size by?
'''
return self.get_font_size_combined_coeff()
def show_decoration(self) :
'''Do we show decoration when drawing?'''
return self.included() and self.get_parent().show_decoration()
def get_size_scaling(self) :
'''What is the font size coeff (not inc our own setting)'''
return self.get_ip('font_size_coeff')
def set_size_scaling(self, scale) :
'''Set our own font size scaling'''
self.set_font_size_scaling(scale, quiet=True)
def set_line_size(self, size) :
'''Set the master font size.'''
self.set_font_size(size)
def get_font_size(self) :
'''What is the master font size (not inc. scalings)'''
return self.get_ip('font_size')
def get_scaled_font_size(self) :
'''What is the scaled font size? Probably what you're looking for!'''
fs = self.get_font_size() * self.get_font_size_combined_coeff()
return fs
def get_font_size_combined_coeff(self) :
'''What is the font size coeff (inc our own setting)'''
lc = self.get_ip('font_size_coeff')
lc *= self.get_ip('font_size_combined_coeff')
return lc
def set_font_size_scaling(self, scale, quiet=False) :
'''Set our own scaling of the font size.'''
a = self.get_scaled_font_size()
self.set_ip('font_size_coeff', scale, quiet=quiet)
# What is the ratio of old to new final font size?
a = self.get_scaled_font_size()/a
# Scaling contents accordingly
if not fc(a, 1.0) :
self.scale(a)
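# Worked example (illustrative; 'ent' is a hypothetical GlypherEntity): if our
# own coefficient is currently 1.0, then
#
#     ent.set_font_size_scaling(0.5)   # scaled font size halves, so the
#                                      # geometry is scaled in place by 0.5
#     ent.set_font_size_scaling(1.0)   # ratio is now 2.0, scaling it back up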
def set_font_name(self, name) :
'''What font do we use?'''
self.set_ip('font_name', name)
def set_font_size(self, size) :
'''Set the master font size.'''
a = self.get_scaled_font_size()
self.set_ip('font_size', size)
# What is the ratio of change?
a = self.get_scaled_font_size()/a
if not fc(a, 1.0) :
self.scale(a)
def check_inherited_properties(self) :
'''
Find out whether any of our inherited properties have changed. Update
them in the process.
'''
resp = 0
for i in self.inherited_properties :
o = self.inherited_properties[i]
# Check whether our inherited version has changed and, post
# updating, whether an override exists such that we don't care
if self.inherited_properties[i] != \
self.update_inherited_property(i) and\
i not in self.inherited_properties_overrides :
# Set resp to 1, unless we, or a previous iteration, should
# express the need for a recalc
if resp != 2 and \
i not in self.inherited_properties_recalc_on_change :
resp = 1
else :
resp = 2
return resp
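# Summary of the return codes above (consumed by children_check below):
#     0 - nothing relevant changed (or an override masks the change)
#     1 - an inherited property changed, but none that forces a recalc
#     2 - a property listed in inherited_properties_recalc_on_change (e.g.
#         'bold') changed, so children_check may trigger a recalc_bbox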
def __nonzero__(self) :
"""Ensure that __len__ does not invalidate our if statements."""
return True
def update_inherited_property(self, i) :
'''
Get the latest version of an inherited property and record it, returning
the result.
'''
# Nothing changes if we don't have a parent. Note that runted=True has
# no impact here (i.e. we don't write 'if self.included()')
if self.get_parent() is not None :
# If we don't have a special routine for updating, just grab our
# parent's value (NB: this makes it important that our parent is
# updated first)
if i not in self.inherited_properties_updaters :
ip = self.get_parent().get_ip(i)
else :
ip = self.inherited_properties_updaters[i](self)
# If there's a special routine for executing a change in this IP,
# run it, otherwise just set it directly in the IP dict. Note that
# this means any IP change routine is responsible for updating the
# IP dict.
if i in self.inherited_properties_change :
self.inherited_properties_change[i](ip)
else :
self.inherited_properties[i] = ip
return self.inherited_properties[i]
def _set_font_size_for_ip(self, n) :
'''IP change routine for font_size.'''
#FIXME: redundant
self.inherited_properties['font_size'] = n
def _set_font_size_coeff_for_ip(self, n) :
'''IP change routine for font_size_coeff.'''
self.inherited_properties['font_size_coeff'] = n
def _set_font_size_combined_coeff_for_ip(self, n) :
'''IP change routine for font_size_combined_coeff.'''
self.inherited_properties['font_size_combined_coeff'] = n
def get_ip(self, i, ignore_override=False) :
'''Get the value of an inherited property (includes overrides, etc.)'''
# If this is as easy as returning the override, do so
if i in self.inherited_properties_overrides and not ignore_override :
return self.inherited_properties_overrides[i]
# If there is a default override, same deal
elif i in self.default_inherited_properties_overrides and not ignore_override :
return self.default_inherited_properties_overrides[i]
# Otherwise we want the real deal
else :
return self.inherited_properties[i]
def set_ip(self, i, val, quiet=False) :
'''
Override an IP and call children_check to see if that has implications
down the road. Use val=None to cancel an override
'''
# Are we being asked to remove the IP override?
if val is None and i in self.inherited_properties_overrides :
del self.inherited_properties_overrides[i]
self.children_check(force=True, quiet=quiet)
# Are we being asked to add an IP override and does this change
# anything?
if val is not None and \
(i not in self.inherited_properties_overrides or \
self.inherited_properties_overrides[i] != val) :
self.inherited_properties_overrides[i] = val
self.children_check(force=True, quiet=quiet)
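# Illustrative usage (not executed here; 'ent' is a hypothetical entity):
#
#     ent.set_ip('rgb_colour', (1.0, 0.0, 0.0))   # override: draw in red
#     ent.get_ip('rgb_colour')                    # -> (1.0, 0.0, 0.0)
#     ent.set_ip('rgb_colour', None)              # cancel the override again
#
# Any children pick the change up through the children_check cascade below.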
def children_check(self, parent_change=False, quiet=False, force=False) :
'''
Check whether any changes are required to our children. In practice,
this means check whether we have any outstanding changes to our IPs that
need to be filtered down, and consequent recalcs that need to be
executed.
'''
resp = self.check_inherited_properties()
# No harm, no foul.
if not force and resp == 0 :
return
# Derived classes should really override this if a parent change
# would directly affect them (other than a simple translate)
if not quiet and resp == 2 and (not parent_change or g.anal_retentive) :
self.recalc_bbox(quiet=quiet)
self.set_redraw_required()
def get_rgb_colour(self) :
'''What colour is this [IP]?'''
return self.get_ip('rgb_colour')
def set_rgb_colour(self, rgb_colour) :
'''Set colour [IP]'''
self.set_ip('rgb_colour', rgb_colour)
def next_alternative(self) :
'''If this entity has multiple alternatives, move to the next one.'''
self.change_alternative(1)
def prev_alternative(self) :
'''If this entity has multiple alternatives, move to the prev one.'''
self.change_alternative(-1)
alternatives = None
def alt_to_ent(self, a) :
'''
Create an actual entity from a member of the alternatives
dictionary
'''
return make_phrasegroup(self.get_parent(), a)
def change_alternative(self, dir=1) :
'''
Change the current entity to one of its alternatives
'''
altable = self.get_have_alternatives()
# If we can't, maybe our parent can?
if not altable or len(self.alternatives) == 0 :
if self.included() :
return self.get_parent().change_alternative(dir)
else :
return False
# What's the index of the alternative in given dir?
n = self.alternatives.index(self.altname)
n += dir
n = n % len(self.alternatives)
# Create the alternative
alt = self.alt_to_ent(self.alternatives[n])
alt.alternatives = self.alternatives
alt.set_have_alternatives(True)
# Add the alternative and remove ourselves
#FIXME: why doesn't this use the exchange member of Phrase?
self.get_parent().append(alt, after=self)
self.get_parent().remove(self)
return True
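# Illustrative sketch: an entity whose alternatives list were, say,
# ['sin', 'cos', 'tan'] (hypothetical phrasegroup names) and whose altname
# were 'sin' would be swapped for the 'cos' version by change_alternative(1)
# and for the 'tan' version by change_alternative(-1); an entity with no
# alternatives of its own simply hands the request to its parent.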
def check_combination(self, shape, go_up=True) :
'''
This shape is to be added; if anyone wants to combine with it, speak
now.
'''
if self.included() and go_up :
return self.get_parent().check_combination(shape)
return False
def draw_alternatives(self, cr) :
'''
.. deprecated:: 0.2
'''
return False
active = False
def process_key(self, name, event, caret) :
'''
Check whether this key combination has a special meaning for this entity
or, if not, any of its parents
'''
altable = self.active and self.get_have_alternatives()
# By default we expect Up and Down to change the alternative; note this
# isn't necessarily Up & Down without modifiers, as Caret will only call
# this routine if it has reason to believe we want Entity-specific
# handling, normally indicated by Super
if name == 'Up' and altable :
self.prev_alternative()
elif name == 'Down' and altable :
self.next_alternative()
elif name == 'k' and 'main_phrase' in self.mes :
ET.dump(gutils.xml_indent(self.get_xml()))
if self.included() :
return self.get_parent().process_key(name, event, caret)
return False
def to_clipboard(self, auto_paste=False, who=None, contents=False) :
'''
Call upwards to add ourselves (or 'who') to the clipboard; ultimately
calls the rudimentary signal handler of main phrase.
'''
if who is None :
who = self
if self.included() :
return self.get_parent().to_clipboard(auto_paste, who, contents)
return False
def process_button_release(self, event) :
'''
What should we do if asked to process a button release? Ask parent if
not overridden.
'''
if self.included() :
return self.get_parent().process_button_release(event)
return False
def process_button_press(self, event) :
'''
What should we do if asked to process a button press? Ask parent if
not overridden.
'''
if self.included() :
return self.get_parent().process_button_press(event)
return False
def process_scroll(self, event) :
'''
What should we do if asked to process a scroll? Ask parent if
not overridden.
'''
if self.included() :
return self.get_parent().process_scroll(event)
return False
def contains(self, point) :
'''
Is this point inside this entity's first config?
'''
#FIXME: extend to all configs (but check implications of that first)
return self.config[0].bbox[3] > point[1] and \
self.config[0].bbox[1] < point[1] and \
self.config[0].bbox[0] < point[0] and \
self.config[0].bbox[2] > point[0]
# from_nought is the converse of squash, where we act as if this magically appeared.
def from_nought(self, quiet = False) :
'''
This is a slightly optimized (as called from high-use function append)
way to tell the configs that they have gone from zero to full (normal)
width without resorting to a hide then a show.
'''
# Problems can occur with these fiddly functions, so check in
# anal_retentive mode.
if g.anal_retentive_mode and self.am('phrase') :
self.consistency_check_sub_poses()
self.recalc_bbox(quiet=True)
# If this is currently visible, we've got a very quick way of doing this
if self.get_visible() :
for c in self.config :
self.config[c].old_bbox[2] = self.config[c].bbox[0]
self.feed_up(quiet=quiet)
# Otherwise, we expect the correct width to still be available
else :
self.set_visible(True)
for c in self.config :
cfg = self.config[c]
cfg.bbox[2] = cfg.bbox[0] + cfg.width
self.feed_up(quiet=quiet)
def squash(self, quiet = False) :
'''
Make the width zero (and do necessary calls). In current
implementation, this is equivalent to calling hide.
'''
self.hide(quiet=quiet)
def get_bodmas_level(self) :
'''What's the BODMAS level (if one exists)'''
if self.am_c('_bodmasable') :
return self.get_p('bodmas_level')
return None
def find_nearest(self, point, fall_through=True,
enterable_parent=False,
attachable=False,
row=0,
avoid=None) :
'''
Return the nearest attachable entity; in this case, it's self or
there isn't one.
'''
# Return fail if we have to avoid ourselves
if self == avoid :
return (-1, self)
dist = self.find_distance(point)
# If this is usable, return so.
if (enterable_parent and \
(self.get_parent() is None or \
not self.get_parent().is_enterable())) or \
(attachable and not self.is_attachable()) :
return (-1, self)
return (dist, self)
def get_width(self) :
'''Get width of first config.'''
return self.config[0].bbox[2] - self.config[0].bbox[0]
def get_height(self) :
'''Get height of first config.'''
return self.config[0].bbox[3] - self.config[0].bbox[1]
def get_basebox_height(self) :
'''Get height of alignment basebox (of first config).'''
return self.config[0].basebox[5] - self.config[0].basebox[3]
def draw(self, cr) :
print 'GlypherEntity is an abstract class!'
def scale(self, s, quiet=False) :
'''Scale this entity (from the bottom left)'''
if fc(s, 1.0) :
return
self._scale_enact(s)
self.feed_up(quiet=quiet)
def _scale_enact(self, s) :
'''Do the hard work of scaling this entity.'''
# Do the bbox scaling for each config
#FIXME: shouldn't this be relative to the BL of the first cfg?
for c in self.config :
cfg = self.config[c]
cfg.width *= s
bb = cfg.bbox
bb[2] = bb[0] + (bb[2]-bb[0])*s
bb[1] = bb[3] - (bb[3]-bb[1])*s
# Scale baseboxes (of the first config) accordingly
self.config[0].scale_baseboxes(s)
# Update the reference dimensions
self.set_ref_height(self.get_ref_height()*s)
self.set_ref_width(self.get_ref_width()*s)
# Update the padding
for i in range(0,4) :
self.padding[i] *= s
def move_to(self, x, y) :
'''Move to a point (in local coords)'''
self.translate(x - self.config[0].bbox[0], y - self.config[0].bbox[1])
def translate(self, h, v, quiet=False, by_offset=True, after=None,
config=None, do_reset=True) :
'''Move this entity by h, v'''
if after is None :
after = (h<0)
self._translate_enact(h, v, after, config=config)
if do_reset :
self.feed_up(quiet=quiet)
def config_collapse(self, quiet = False) :
'''Collapse any configs. [not impl]'''
#FIXME: why does this not do anything? Should entity only ever have one?
pass
def config_break(self, x_pos, quiet = False, do_break = False) :
'''Split any configs. [not impl]'''
#FIXME: why does this not do anything? Should entity only ever have one?
return None
def _translate_enact(self, h, v, search_dir_fwd=True, config=None) :
'''Do the hard work of translation.'''
cfgs = self.config if config is None else (config,)
for c in cfgs :
cfg = self.config[c]
cfg.bbox[0] += h
cfg.bbox[2] += h
cfg.bbox[1] += v
cfg.bbox[3] += v
cfg.move_baseboxes(h, v)
# If we need to update the subpos, do so
if abs(h) > 1e-5 and self.included() :
cfg.old_sub_pos = cfg.sub_pos
cfg.sub_pos = self.get_parent().get_free_sub_pos(cfg,
search_dir_fwd=search_dir_fwd)
def find_distance(self, point) :
'''How far from point are we? L2 norm.'''
dist = 0
# If we're between the ends
if point[0] > self.config[0].bbox[0] and \
point[0] < self.config[0].bbox[2] :
# and we're between the sheets
if point[1] > self.config[0].bbox[1] and \
point[1] < self.config[0].bbox[3] :
dist = 0
# but we're not between the sheets
else :
# Measure from closest end
dist = min(abs(point[1]-self.config[0].bbox[1]),
abs(point[1]-self.config[0].bbox[3]))
# If we're only between the sheets
elif point[1] > self.config[0].bbox[1] and \
point[1] < self.config[0].bbox[3] :
dist = min(abs(point[0]-self.config[0].bbox[0]),
abs(point[0]-self.config[0].bbox[2]))
# If this is off on a diagonal, shortest distance to a corner
else :
dists = []; corns = ( (0,1), (0,3), (2,1), (2,3) )
for c in corns :
dists.append(math.sqrt((point[0]-self.config[0].bbox[c[0]])**2 + \
(point[1]-self.config[0].bbox[c[1]])**2 ) )
dist = min(dists)
return dist
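# Worked example (illustrative): with config[0].bbox == [0, 0, 10, 5],
#
#     find_distance((5, 2))    -> 0    (inside the box)
#     find_distance((5, 8))    -> 3    (vertical distance to the nearer edge)
#     find_distance((14, 2))   -> 4    (horizontal distance to the nearer edge)
#     find_distance((13, 9))   -> 5    (diagonal: distance to corner (10, 5))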
def get_repr(self) :
'''
Get a string (not unicode) representation of this object directly
from the Sympy object.
'''
try :
val = self.get_sympy()
return str(val)
except SympifyError as e :
debug_print(str(e))
return None
title = None
def set_title(self, title) :
'''Set a pretty title, usually describing the type.'''
self.title = title
def get_title(self) :
'''Return the pretty title, usually describing the type.'''
return self.title
# Normally we won't want the caret to flag up existence of info
indicate_info = False
info_text = None
def set_info_text(self, info_text) :
'''Provide some user documentation for this entity.'''
self.info_text = info_text
def get_info_text(self) :
'''Provide some user documentation for this entity.'''
return self.info_text
wiki_link = None
def set_wiki_link(self, wiki_link) :
'''Provide a wiki link for this entity.'''
self.wiki_link = wiki_link
def get_wiki_link(self) :
'''Provide a wiki link for this entity.'''
return self.wiki_link
import Parser
def parse_phrasegroup(parent, tree, ops=None, top=True) :
'''Calls Parser.parse_phrasegroup'''
return Parser.parse_phrasegroup(parent, tree, ops, top)
def make_phrasegroup(parent, name, operands=None, properties=None) :
'''Calls Parser.make_phrasegroup'''
return Parser.make_phrasegroup(parent, name, operands, properties) | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glypher/Entity.py | Entity.py |
import glypher as g
from aobject.utils import debug_print
import StringIO
import cairo
import time
import copy
import traceback
import sympy
import draw
import Entity
import Parser
import gutils
import lxml.etree as ET
# Comparison functions
ac = gutils.array_close
fc = gutils.float_close
fgte = gutils.float_greater_than_or_equal
fgt = gutils.float_greater_than
flt = gutils.float_less_than
# Motion tolerance; when we give up aligning
sm_tol = g.sm_tol
bg_tol = g.bg_tol
_testing_phr = None
GLYPHERDIRECTION_UP = 1
GLYPHERDIRECTION_DOWN = -1
default_keep_min_row_height = True
class GlypherPhrase(Entity.GlypherEntity) :
'''
Main container class
'''
rows = None
cols = None
row_offsets = None
col_offsets = None
row_aligns = None
col_aligns = None
row_bboxes = None
col_bboxes = None
row_redirects = None
col_redirects = None
name = None
is_caching = False
cached_xml = None
width = 0
# Default binary expression containment rule
stop_for_binary_expression_default = False
# Exceptions to the rule
stop_for_binary_expression_exceptions = ()
def stop_for_binary_expression(self, name) :
"""
If a BinaryExpression is suggested inside this PhraseGroup, do we force
it to render below here, or do we keep checking upwards for an extant
BinaryExpression to tag it on to?
"""
return self.stop_for_binary_expression_default ^ \
(name in self.stop_for_binary_expression_exceptions)
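# Illustrative example: with stop_for_binary_expression_default = True and
# stop_for_binary_expression_exceptions = ('equality',) (hypothetical name),
# stop_for_binary_expression('add') is True while
# stop_for_binary_expression('equality') is False - the XOR flips only the
# listed exceptions.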
def __len__(self) :
"""Return the number of entities in this entity (not .IN()!)."""
return len(self.entities)
def __getitem__(self, key) :
"""Return the key-th item in the sorted entities (not .IN()!)."""
if not isinstance(key, int) :
raise KeyError("""
Looking for non-int index in a Phrase; did you want
a PhraseGroup?
""")
return self.sort_entities()[key]
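# Illustrative usage (not executed here; 'phr' is a hypothetical GlypherPhrase
# and 'lhs' a hypothetical target name):
#
#     len(phr)     # number of direct child entities
#     phr[0]       # left-most child in (col, row, pos, sub-pos) sorted order
#     phr['lhs']   # raises KeyError - named lookup belongs to PhraseGroup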
def is_leading_with_num(self) :
'''
Tells us whether this phrase starts with a digit, for visual
formatting purposes
'''
return len(self.rows) == 1 and len(self.get_row(0)) > 0 and \
self.get_row(0)[0].get_entity().am('phrase') and \
self.get_row(0)[0].get_entity().is_leading_with_num()
def set_row_col_defaults(self) :
'''
Note the default row/col_aligns for XML omission, if unchanged.
'''
self.default_row_aligns = self.row_aligns.copy()
self.default_col_aligns = self.col_aligns.copy()
def get_bold(self) :
return self.get_ip('bold')
def set_bold(self, bold) :
self.set_ip('bold', bold)
def get_entities(self) :
'''
Return (a tuple copy of) the list of entities
'''
if self != self.IN() : return self.IN().get_entities()
return tuple(self.entities)
def get_row(self, r, only_visible = False) :
'''
Return configs in a given row, potentially restricting to visible.
'''
cfgs = self.sort_configs()
if only_visible :
return filter(\
lambda c : c.row==r and c.get_entity().get_visible(), cfgs)
else :
return filter(lambda c : c.row==r, cfgs)
def get_col(self, r, only_visible = False) :
'''
Return configs in a given col, potentially restricting to visible.
'''
cfgs = self.sort_configs()
if only_visible :
return filter(\
lambda c : c.col==r and c.get_entity().get_visible(), cfgs)
else :
return filter(lambda c : c.col==r, cfgs)
def set_col_align(self, r, align) :
'''
Set the alignment of a given column.
'''
self.col_aligns[r] = align
self.recalc_bbox()
def set_row_align(self, r, align) :
'''
Set the alignment of a given row.
'''
self.row_aligns[r] = align
self.recalc_bbox()
def set_default_entity_xml(self) :
'''
Log the XML at creation to work out whether it has changed come
compilation (and so needs to be stored)
'''
for e in self.entities :
self.default_entity_xml[e] = \
ET.tostring(e.get_xml(name=e.get_name(), top=False))
def get_xml(self, name=None, top=True, targets=None, full=False) :
'''
Retrieve an XML expression for this Phrase and its contents.
'''
root = Entity.GlypherEntity.get_xml(self, name, top, full=full)
ents = ET.Element('entities')
ent_list = self.sort_entities()
# Add in all of the contained entities
for e in ent_list :
r = e.get_xml(name=e.get_name(), top=False, targets=targets,
full=full)
if e.am('target_phrase') :
# If this is a target, note it
if targets is not None and e in targets.values() :
r.set('target', e.get_name())
continue
# Include placement information
if e.config[0].row != 0 :
r.set('row', str(e.config[0].row))
if e.config[0].col != 0 :
r.set('col', str(e.config[0].col))
m = ' '.join(map(str, e.padding))
if m != "0.0 0.0 0.0 0.0" :
r.set('padding', m)
# Mention what this is
r.set('type', e.mes[len(e.mes)-1])
# If this wasn't the same when we set the default XML, include it
if e not in self.default_entity_xml or \
ET.tostring(r) != self.default_entity_xml[e] :
ents.append(r)
if len(ents) > 0 :
root.append(ents)
# Things to get rid of
#FIXME: does this do anything?
rms = ET.Element('removes')
for e in self.default_entity_xml :
if e not in self.entities :
ET.SubElement(rms, e.get_name())
if len(rms) > 0 : root.append(rms)
# Store details of the rows themselves
rows = ET.Element('rows')
for r in self.rows :
row = ET.Element('row')
row.set('id', str(r))
if self.row_offsets[r] is not None :
row.set('offset', str(self.row_offsets[r]))
if r not in self.default_row_aligns or \
self.row_aligns[r] is not self.default_row_aligns[r] :
row.set('align', self.row_aligns[r])
if len(row.keys()) > 1 : rows.append(row)
if len(rows) > 0 : root.append(rows)
# Store details of the cols themselves
cols = ET.Element('cols')
for r in self.cols :
col = ET.Element('col')
col.set('id', str(r))
if self.col_offsets[r] is not None :
col.set('offset', str(self.col_offsets[r]))
if r not in self.default_col_aligns or \
self.col_aligns[r] is not self.default_col_aligns[r] :
col.set('align', self.col_aligns[r])
if len(col.keys()) > 1 : cols.append(col)
if len(cols) > 0 : root.append(cols)
return root
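# Rough sketch of inspecting this serialization (gutils and lxml.etree are
# already imported at the top of this module; 'phr' is hypothetical):
#
#     xml = phr.get_xml()
#     print ET.tostring(gutils.xml_indent(xml))
#
# Only entities, rows and cols whose state differs from the recorded defaults
# are written out.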
# FIXME: why aren't we passing the quiet through !?
def children_check(self, parent_change=False, quiet=False, force=False) :
'''
See if some change to the inherited properties needs to be filtered down
'''
resp = self.check_inherited_properties()
if not force and resp == 0 : return
# This assumes that if a recalc is necessary, it will happen when one of
# the children changes
if self.entities and len(self.entities) > 0 :
for ent in self.entities :
ent.children_check(parent_change=parent_change, quiet=quiet)
elif not quiet and resp == 2 :
self.recalc_bbox(quiet=quiet)
self.child_altered()
if g.anal_retentive_mode :
self.consistency_check_sub_poses()
self.set_redraw_required()
def is_enterable(self) :
'''
Can the caret rest directly inside this?
'''
if self != self.IN() :
return self.IN().is_enterable()
return (self.get_enterable() or self.is_edit_mode()) and \
self.get_visible() and not self.get_blank()
def child_change(self) :
'''
Feed up a change for the parent
'''
if self.included() :
self.get_parent().child_change()
def delete(self, sender=None, if_empty=False, quiet=False) :
'''
Get rid of this entity.
if_empty - only delete if no children
'''
if not if_empty or len(self.get_entities()) == 0 :
return Entity.GlypherEntity.delete(self)
# Tell parent
if not quiet :
self.child_change()
def empty(self, override_in = False) :
'''
Dump all of the children
'''
if not override_in and self != self.IN() :
self.IN().empty()
return
l = list(self.entities)
for ent in l :
self.remove(ent)
def __init__(self, parent, area=(0,0,0,0), line_size_coeff=1.0,
font_size_coeff=1.0, align=('l','m'), auto_fices=False,
align_as_entity = False) :
# Generate dictionaries and lists belonging to this object
self.entities = []
self.rows = []
self.cols = []
self.col_bboxes = {}
self.col_offsets = {}
self.col_aligns = {}
self.col_redirects = {}
self.row_bboxes = {}
self.row_offsets = {}
self.row_aligns = {}
self.row_redirects = {}
Entity.GlypherEntity.__init__(self, parent)
# Add child_configs member to our first config - only useful
# for phrases
self.config[0].child_configs = []
# Give self a provisional name
self.set_name('phrase_'+str(id(self)))
# Set default properties
self.add_properties(\
{'default_offset_per_row' : 30,
'blank_ratio' : 0.0,
'align_as_entity' : False,
'is_decorated': False,
'enterable' : True,
'min_height' : None,
})
# Not currently displaying alternatives
self.active = False
# Generate an initial bbox
self.config[0].set_bbox(area)
self.config[0].baseboxes = {}
# Add my type
self.mes.append('phrase')
# Minimal row/col
self.add_row(0)
self.add_col(0)
self.set_row_col_defaults()
# No children are currently active
self.child_active = False
# Reset & recalc
self.recalc_baseboxes()
self.config[0].reset()
self.recalc_bbox(quiet = True)
# If anyone's looking for a caret target, pick me
self.set_recommending(self)
# Save XML setup as default
self.default_entity_xml = {}
self.set_default_entity_xml()
# Set the font size scaling from the init args
self.set_font_size_scaling(font_size_coeff)
def elevate_entities(self, new_parent, adopt=False, to_front=False) :
'''
Lift this phrase's children to a new_parent
'''
if self.IN() != self :
self.IN().elevate_entities(new_parent, adopt, to_front)
return
l = list(self.entities)
for ent in l :
self.remove(ent)
# Move to the front of this element before reattaching
if to_front :
ent.translate(\
self.OUT().config[0].bbox[0]-ent.config[0].bbox[0],0)
if adopt :
new_parent.adopt(ent)
else :
new_parent.append(ent)
def exchange(self, former, latter) :
'''
Swap an entity for a child
'''
# Add after the current child
self.append(latter, after=former, row=former.config[0].row,
col=former.config[0].col)
# Orphan the current child
former.orphan()
self.set_recommending(latter)
def set_child_active(self, child_active, desc) :
'''
Set the child_active flag
'''
self.child_active = child_active
# This goes for parents too
if self.included() :
self.get_parent().set_child_active(child_active, desc)
def set_col_redirect(self, col, to_phrase) :
'''
Rather than following standard search, Caret should go here when asked
to enter column.
'''
self.col_redirects[col] = to_phrase
def get_col_redirects(self) :
'''
Where are the column redirects going?
'''
if self != self.OUT() :
return self.OUT().get_col_redirects()
return self.col_redirects
def set_row_redirect(self, row, to_phrase) :
'''
Rather than following standard approach, Caret should go here when asked
to enter row.
'''
self.row_redirects[row] = to_phrase
def get_row_redirects(self) :
'''
Where are the row redirects going?
'''
if self != self.OUT() :
return self.OUT().get_row_redirects()
return self.row_redirects
def to_string(self, mode = "string") :
'''
Generate a unicode representation of this Phrase by concatenating string
representations of its contents
'''
if not self.get_visible() :
return unicode("")
elif self.get_blank() :
return unicode(" ")
ents = self.sort_entities()
body = unicode('').join([l.to_string(mode) for l in ents])
return self.string_compile(body, mode)
def to_latex(self) :
'''
Generate a LaTeX representation of this Phrase by concatenating LaTeX
representations of its contents
'''
if not self.get_visible() :
return ""
elif self.get_blank() :
return " "
ents = self.sort_entities()
body = ''.join([l.to_latex() for l in ents])
return body
def get_sympy(self) :
'''
Return a sympy version of this entity. If it isn't a 1-entity phrase,
this must be overridden
'''
if len(self.IN().entities) == 0 :
return None
if len(self.IN().entities) == 1 :
return self.IN().entities[0].get_sympy()
raise RuntimeError(\
"get_sympy: Multiple ents in phrase without get_sympy overridden")
def _translate_enact(self, h, v, search_dir_fwd, config = None) :
'''
Do the hard labour of moving this object.
'''
Entity.GlypherEntity._translate_enact(self, h, v, search_dir_fwd,
config=config)
# If this object creates a local space, its internals should not move.
if self.get_local_space() :
return
#FIXME: Tidy to avoid multiple recalcs
#FIXME: Just eww.
for r in self.row_bboxes :
# If we have multiple configs and no rows (or child rows) ???
#FIXME: what's going on here?
if len(self.config) > 1 and config is not None and \
len(set(self.rows) & \
set([c.row for c in self.config[config].child_configs]))==0:
continue
# if we have multiple configs and not all rows appear in this config
elif len(self.config) > 1 and config is not None and \
len(set(self.rows) - \
set([c.row for c in self.config[config].child_configs])) > 0:
self.recalc_bbox(quiet=True, realign=False)
# otherwise, we expect the row bboxes will just be the direct
# translation of what they were
else :
self.row_bboxes[r][0] += h
self.row_bboxes[r][2] += h
self.row_bboxes[r][1] += v
self.row_bboxes[r][3] += v
# Same for columns
for c in self.col_bboxes :
if len(self.config) > 1 and config is not None and \
len(set(self.cols) & \
set([d.col for d in self.config[config].child_configs]))==0:
continue
elif len(self.config) > 1 and config is not None and \
len(set(self.cols) - \
set([d.col for d in self.config[config].child_configs])) > 0:
self.recalc_bbox(quiet=True, realign=False)
else :
self.col_bboxes[c][0] += h
self.col_bboxes[c][2] += h
self.col_bboxes[c][1] += v
self.col_bboxes[c][3] += v
def translate(self, h, v, quiet=False, by_offset=True, after=None,
config=None, do_reset=True) :
'''
Move self and children, ideally without full recalc
after - whether this should receive first or last subpos once moved
(default : (h<0))
'''
if after is None :
after = (h<0)
# If we are in local space and are moving our first (reference) config,
# we must instead move everything else the other way - potentially slow,
# but usually outweighed by advantage of never having to translate the
# first config (in local space mode). It's a bit like being the first
# person in a bus queue and moving three side-steps towards the bus.
# You've moved, but if the coordinate space is local to the queue
# (of which you are the head), actually everybody else just moved three
# steps away.
reverse_move = (config == 0 and self.get_local_space() and \
len(self.config) > 1)
# If we have child entities and are not the only config in a local
# space, move the children
if len(self.entities) > 0 and (not self.get_local_space() or \
(config not in (0,None)) or \
reverse_move) :
lh = h; lv = v
# In local space, the anchor point is on config 0, so moving 0
# is equivalent to moving all other configs the opposite direction
#FIXME: isn't this just reverse_move ?
if self.get_local_space() and config == 0 and len(self.config) > 1 :
lh = -h; lv = -v
cfgs = filter(lambda c : c.parent_config_index != 0,
self.sort_configs(rev=(lh>0)))
else :
cfgs = self.sort_configs(rev=(lh>0), config=config)
# Actually move all (the entities providing) the component configs
for cfg in cfgs :
ent = cfg.get_entity()
ent.translate(lh, lv, quiet=True, by_offset=by_offset,
config=cfg.index)
# Actually move our own structure
self._translate_enact(h, v, after, config=config)
# If movements are being saved up before feeding up, let our caller deal
# with it
if do_reset :
self.feed_up(quiet=quiet)
def scale(self, s, quiet = False) :
'''
Try to scale this entity and children without a recalc
'''
# Don't scale if we're (close enough) scaling by 1.0
if fc(s, 1.0) :
return
# Look at children if we have any
if len(self.entities) > 0 :
cfgs = self.sort_configs()
# Define our new bottom-left offset
if self.get_local_space() :
l, b = (0,0)
else :
l = self.config[0].bbox[0]; b = self.config[0].bbox[3]
# Translate all the configs to their new bottom left corners
for cfg in cfgs :
ent = cfg.get_entity()
d1 = cfg.bbox[0]-l; d2 = cfg.bbox[3]-b
ent.translate(d1*(s-1), d2*(s-1), quiet=True, config=cfg.index)
# Scale them where they stand
for ent in self.entities :
ent.scale(s, quiet=True)
# Scale our own structure
self._scale_enact(s)
#self.recalc_bbox(quiet=True)
if len(self.rows) > 0 or len(self.cols) > 0 :
self.recalc_bbox(quiet=quiet)
else :
# Tell the world
self.feed_up(quiet=quiet)
def _scale_enact(self, s) :
'''
Mechanics of scaling our this entity's structure
'''
Entity.GlypherEntity._scale_enact(self, s)
return
def get_configs(self, config=None, only_visible=False) :
'''
Pick out all our child configs
'''
# Collect all child configs or just child configs of one child
if config is None :
cfgs = [cfg for cfgs in \
[self.config[c].child_configs for c in self.config] \
for cfg in cfgs]
else :
cfgs = self.config[config].child_configs
# Return all configs or only the visible ones
if only_visible :
return filter(lambda c : c.get_entity().get_visible(), cfgs)
else :
return cfgs
def get_free_sub_pos(self, child, search_dir_fwd=True) :
'''
Find a free sub position for a child config, that is an unoccupied index
amongst the children sharing a point for their left bound
'''
cfgs = self.get_configs()
new_sub_pos = 0
for cfg in cfgs :
# Ignore this cfg if it isn't matched on left
if not fc(cfg.bbox[0], child.bbox[0]) or cfg.row != child.row or \
cfg.col != child.col :
continue
# Ignore this config if it's the child
if cfg == child :
continue
# Increment or decrement new_sub_pos if this config has taken it
if search_dir_fwd :
if cfg.sub_pos >= new_sub_pos :
new_sub_pos = cfg.sub_pos+1
elif cfg.sub_pos <= new_sub_pos :
new_sub_pos = cfg.sub_pos-1
return new_sub_pos
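# Worked example (illustrative): if two zero-width children already sit at the
# same left edge in this row/col with sub_pos 0 and 1, a third config arriving
# there is given sub_pos 2 when search_dir_fwd is True, or -1 when searching
# backwards, keeping the ordering at a shared x position unambiguous.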
def format_entities(self) :
'''
List the format_me's for each child entity
'''
return "\n".join([str(e.format_me()) for e in self.entities])
def consistency_check_sub_poses(self) :
'''
As one of the first indications of an alignment/movement screw-up is
often the appearance of two entities at the same point and subpos, this
convenience function can be called to make sure it hasn't happened.
'''
for ent in self.entities :
for a in self.entities :
if a != ent and \
fc(a.config[0].bbox[0], ent.config[0].bbox[0]) and\
a.config[0].sub_pos == ent.config[0].sub_pos and \
a.config[0].row == ent.config[0].row and \
a.config[0].col == ent.config[0].col :
raise RuntimeError(\
'Two elements found sharing same position, '+\
'sub-position and row')
def get_new_sub_pos_after(self, after) :
'''
Adds a subpos directly after the config 'after', moving subsequent
configs along if necessary
'''
parent_config = self.config[after.parent_config_index]
for cfg in parent_config.child_configs :
if not fc(cfg.bbox[0], after.bbox[0]) \
or cfg.row != after.row or cfg.col != after.col :
continue
if cfg.sub_pos > after.sub_pos :
cfg.sub_pos += 1
if g.anal_retentive_mode :
self.consistency_check_sub_poses()
return after.sub_pos + 1
def sort_entities(self, rev=False, old=False, only_visible=False) :
'''
Return a sorted copy of the entities. This is sorted in a unique order
determined by each entity's first config's col, row, pos and sub-pos
'''
if only_visible :
ents = filter(lambda c : c.get_visible(), self.entities)
else :
ents = list(self.entities)
# If we're empty, return trivial list
if len(ents) == 0 :
return []
# Direction int
d = -1 if rev else 1
# Compare pos's
b = lambda x : x.config[0].old_bbox[0] if old else x.config[0].bbox[0]
# Compare subpos's
c = lambda x : x.config[0].old_sub_pos if old else x.config[0].sub_pos
ents.sort(lambda x,y :\
-d if x.config[0].col < y.config[0].col else
d if x.config[0].col > y.config[0].col else
-d if x.config[0].row < y.config[0].row else
d if x.config[0].row > y.config[0].row else
-d if flt(b(x), b(y)) or (fc(b(x), b(y)) and flt(c(x), c(y))) else \
d if flt(b(y), b(x)) or (fc(b(x), b(y)) and flt(c(y), c(x))) else \
0 )
return ents
def sort_configs(self, rev=False, old=False, config=None) :
'''
Like the ent sorting, but we consider all individual configs
'''
cfgs = self.get_configs(config)
d = -1 if rev else 1
if len(cfgs) == 0 : return []
b = lambda x : x.old_bbox[0] if old else x.bbox[0]
c = lambda x : x.old_sub_pos if old else x.sub_pos
cfgs.sort(lambda x,y :\
-d if x.col < y.col else
d if x.col > y.col else
-d if x.row < y.row else
d if x.row > y.row else
-d if flt(b(x), b(y)) or (fc(b(x), b(y)) and flt(c(x), c(y))) else \
d if flt(b(y), b(x)) or (fc(b(x), b(y)) and flt(c(y), c(x))) else \
0 )
return cfgs
def child_altered(self, child = None) :
'''
Tell any parent that a child has been altered
'''
if self.included() :
self.get_parent().child_altered(self)
def child_bbox_change(self, child) :
'''
One of our child element bboxes has changed! Move any children that need
to move to compensate.
'''
if g.anal_retentive_mode and self.get_parent() :
self.get_parent().consistency_check_sub_poses()
for c in child.config :
child_cfg = child.config[c]
# What is the bbox of this config?
cb = child_cfg.get_bbox()
# What was the bbox of this config?
cob = list(child_cfg.old_bbox)
cfgs = self.sort_configs(rev=False, old=True)
# We're looking for knock-on effect on the rest of our configs;
# where do we start?
root = cfgs.index(child_cfg)
# We'll need to calculate knock-on of left bdy movement, then right
# bdy movement; should we go up the way or down?
go_down = (cb[0]>cob[0], cb[2]>cob[2])
# Where do we start from?
i = root if go_down[0] else -1
# Make sure something's actually changed
if not ac(child_cfg.bbox, child_cfg.old_bbox) :
for n in (0, 1) :
d = -1 if go_down[n] else 1
i += d
# If the left bdy has moved, make sure we have (the right) free
# subpos.
# Irrelevant the first time through, as overwritten on second
if not fc(cb[0], cob[0]) :
child_cfg.sub_pos = \
self.get_free_sub_pos(child_cfg, search_dir_fwd=True)
# Loop through the configs on whichever side
while i >= 0 and i < len(cfgs) and i != root :
ecfg = cfgs[i]
ent = ecfg.get_entity()
if ecfg == child_cfg :
continue
eb = list(ecfg.bbox)
# If child on the right of this cfg
if (flt(eb[2], cob[2])) and \
ecfg.row == child_cfg.row and \
ecfg.col == child_cfg.col :
ent.translate(cb[0] - cob[0], 0, quiet=True,
config=child_cfg.index)
# If child on the left
if (fgte(eb[2], cob[2]) and \
(fgt(cob[2], cob[0]) or (fgt(eb[0], cob[0]) or \
(fc(eb[0], cob[0]) and \
ecfg.sub_pos > child_cfg.old_sub_pos)))) and \
ecfg.row == child_cfg.row and \
ecfg.col == child_cfg.col :
ent.translate(cb[2] - cob[2], 0, quiet=True,
config=child_cfg.index)
i += d
# Very specific case (guess how many screeds of bbox output
# it took to find this bug) where we are in the second
# sorting loop, the child cfg is zero width, its right end
# hasn't moved and the current cfg under consideration has a
# left end on that boundary. Then we may need to give the
# current cfg a new subpos. Best way to think about this
# (and the whole routine) is as a deck of cards, spread out
# in a line with potential for overlap. When the left edges
# match, to provide a consistent order and to move them back
# and forward in order, we must assign vertical positions
# when the left edges match. Now imagine what happens if you
# take a card and change its width where it sits - what
# happens to the cards on either side and what order do you
# have to move them to preserve the distances and ordering?
if n==1 and fc(cb[0], cb[2]) and \
fc(cob[2], cb[2]) and fc(eb[0], cob[2]) :
ecfg.sub_pos = self.get_free_sub_pos(ecfg,
search_dir_fwd=(not go_down[1]))
ecfg.old_sub_pos = ecfg.sub_pos
#Where does the second loop start from?
i = len(cfgs) if go_down[1] else root
# This whole routine cries out "TROUBLE HERE!!!"; consistency check
# is slow but not a bad idea for AR mode.
if g.anal_retentive_mode and self.get_parent() :
self.get_parent().consistency_check_sub_poses()
child_cfg.old_sub_pos = child_cfg.sub_pos
# Make sure any child movement we have initiated is incorporated (this
# will also do the job of calling the same routine in any parent we
# might have).
self.recalc_bbox(in_feed_chain=True)
def entities_by_name(self) :
'''
Return an (ordered) list of child entity names
'''
ents = self.sort_entities()
return [p.get_name() for p in ents]
def format_configs(self, collapsed=True) :
'''
Like format_entities but concatenating config details (or providing a
list of them)
'''
cfgs = self.sort_configs()
format_list = []
for p in cfgs :
name = '"'+str(p.to_string())+'"'
if name is not None :
name += ' ['+str(p.to_string())+']'
format_list.append(name+'['+str(p.index)+']' + ' ' + str(p.bbox) +\
":" + str(p.row) + '~' + str(p.col) + '\n')
return "; ".join(format_list) if collapsed else format_list
def col_range(self) :
'''
Return min and max col indices
'''
u,l = (0,0)
for r in self.cols :
if r > u : u = r
if r < l : l = r
return (u,l)
def row_range(self) :
'''
Return min and max row indices
'''
u,l = (0,0)
for r in self.rows :
if r > u : u = r
if r < l : l = r
return (u,l)
def add_col(self, r, col_align='m') :
'''
Add a new column at index r (which will be changed to one beyond current
limit if too big or small)
'''
# Don't try to add an existing col
if r in self.cols :
return
u,l = self.col_range()
if r < l :
r = l - 1
elif r > u :
r = u + 1
self.cols.append(r)
self.col_aligns[r] = col_align
# Starting, default offset
offset = self.get_p('default_offset_per_row')*r
# Initial bbox
self.col_bboxes[r] = [self.config[0].bbox[0]+offset, self.get_topline(),
self.config[0].bbox[2]+offset, self.get_baseline()]
# It's possible an overriding offset has already been set (even if
# column hasn't existed). If so, don't overwrite it
if r not in self.col_offsets :
self.col_offsets[r] = None
return r
def add_row(self, r, row_align='l') :
'''
Add a new row at index r (which will be changed to one beyond current
limit if too big or small)
'''
if r in self.rows : return
u,l = self.row_range()
if r < l :
r = l - 1
elif r > u :
r = u + 1
self.rows.append(r)
self.row_aligns[r] = row_align
offset = self.get_p('default_offset_per_row')*r
self.row_bboxes[r] = [self.config[0].bbox[0], self.get_topline()+offset,
self.config[0].bbox[0], self.get_baseline()+offset]
#FIXME: why do these two statements not match col version?
self.row_offsets[r] = None
self.recalc_row_basebox(r)
return r
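# Illustrative usage ('phr' is a hypothetical GlypherPhrase): row indices are
# clamped so they stay contiguous around 0,
#
#     phr.add_row(5)    # with only row 0 present, this actually creates row 1
#     phr.add_row(-5)   # similarly creates row -1
#
# and the return value is the index that was really added.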
#FIXME: Check this, it probably doesn't work
def remove_col(self, r) :
'''
Delete a column
'''
# Make sure we aren't deleting the 0th col
if r not in self.cols or r == 0 :
return
self.cols.remove(r)
del self.col_bboxes[r]
del self.col_offsets[r]
del self.col_aligns[r]
# Move other cols in to avoid gaps
u,l = self.col_range()
if r > 0 :
for i in range(r+1, u+1) :
re = self.get_col(i)
for e in re : e.col -= 1
elif r < 0 :
for i in range(l+1, r+1) :
i = r+l-i
re = self.get_col(i)
for e in re : e.col += 1
self.recalc_bbox()
#FIXME: Check this, it probably doesn't work
#FIXME: Why doesn't this match col?
def remove_row(self, r) :
'''
Delete a row
'''
if r not in self.rows or r == 0 :
return
u,l = self.row_range()
if r > 0 :
for i in range(r+1, u+1) :
re = self.get_row(i)
for e in re : e.row -= 1
self.row_bboxes[i-1] = self.row_bboxes[i]
self.row_aligns[i-1] = self.row_aligns[i]
self.row_offsets[i-1] = self.row_offsets[i]
del self.row_aligns[u]
del self.row_offsets[u]
del self.row_bboxes[u]
self.rows.remove(u)
elif r < 0 :
for i in range(l+1, r+1) :
i = r+l-i
re = self.get_row(i)
for e in re : e.row += 1
self.row_bboxes[i+1] = self.row_bboxes[i]
self.row_aligns[i+1] = self.row_aligns[i]
self.row_offsets[i+1] = self.row_offsets[i]
del self.row_aligns[l]
del self.row_offsets[l]
del self.row_bboxes[l]
self.rows.remove(l)
self.recalc_bbox()
def get_cell_bbox(self, r, c) :
'''
Get a bounding box for a specific row and column combo
'''
# All configs in this row and col
cfgs = list(set(self.get_row(r)) & set(self.get_col(c)))
# If we don't have any cfgs, return a zero-width bbox between row bbox
# limits
if len(cfgs) == 0 :
if self.col_offsets[c] is None :
l = self.col_bboxes[c][0]
else :
l = self.col_offsets[c]+self.col_bboxes[0][0]
return [l, self.row_bboxes[r][1], l, self.row_bboxes[r][3]]
# Start with first cfg and expand to include all others
bbox = list(cfgs[0].bbox)
for c in cfgs :
ent = c.get_entity()
if not ent.get_horizontal_ignore() and c.bbox[0] < bbox[0] :
bbox[0] = c.bbox[0]
if not ent.get_vertical_ignore() and c.bbox[1] < bbox[1] :
bbox[1] = c.bbox[1]
if not ent.get_horizontal_ignore() and c.bbox[2] > bbox[2] :
bbox[2] = c.bbox[2]
if not ent.get_vertical_ignore() and c.bbox[3] > bbox[3] :
bbox[3] = c.bbox[3]
return bbox
def append(self, entity, quiet=False, recalc=True, row=0, col=0,
override_in=False, move=(True,True), align=None, after=None) :
'''
Add an entity to the children. This, along with recalc_bbox and
child_bbox_change are where it all happens in this object.
'''
if self.IN() != self and not override_in :
self.IN().append(entity, quiet, recalc, row=row, col=col)
return
entity = entity.OUT()
rel_scale = entity.get_scaled_font_size()
# Make sure this entity is indeed parentless
if entity.included() :
raise RuntimeError('Trying to append an already included entity: '+\
str(entity.format_me())+\
' into '+str(self.format_me()))
# If anybody asks where to go, tell them whatever the entity
# (at this moment) suggests
self.set_recommending(entity.get_recommending())
# Add rows and cols if necessary
#FIXME: what happens if the row and col added aren't row & col?
if row not in self.rows :
row = self.add_row(row)
self.config[0].reset()
if col not in self.cols :
col = self.add_col(col)
self.config[0].reset()
# Calculate the row and col offsets that adjust the entity's offset from
# the Phrase's anchor
row_offset = self.row_bboxes[row][3] - self.config[0].bbox[3]
col_offset = self.col_bboxes[col][2] - self.config[0].bbox[2]
if self.row_offsets[row] is not None :
row_offset = self.row_offsets[row]
if self.col_offsets[col] is not None :
col_offset = self.col_offsets[col]
# Get the limits of the current row-col cell
cell_bbox = self.get_cell_bbox(row, col)
# Pick out the final config of whatever we're locating the entity after
if after is not None :
ac = after.config[len(after.config)-1]
# Pick the appropriate anchor to translate the entity
if after == self :
l = cell_bbox[0]
elif after != None and after in self.entities :
l = ac.bbox[2]
else :
l = cell_bbox[2]
# Cancel any word wrapping and make sure we're only dealing with one
# config in this entity
entity.line_length = -1
entity.config_collapse()
# Set the child config's row and column
entity.config[0].row = row
entity.config[0].col = col
# Move the entity to its new location
h, v = (0,0)
if move[0] :
h = l-entity.config[0].bbox[0]
if move[1] :
v = self.config[0].bbox[3]-entity.config[0].bbox[3]+row_offset
entity.translate(h, v, quiet=True, after=False)
# If we are inserting after a zero-length child, we'll need to make sure
# that the new child config's subpos is after the 'after' child's
if after is not self and after is not None and \
fc(ac.bbox[0], ac.bbox[2]) :
entity.config[0].sub_pos = self.get_new_sub_pos_after(ac)
entity.config[0].old_sub_pos = entity.config[0].sub_pos
else :
entity.config[0].sub_pos = \
self.get_free_sub_pos(entity.config[0],
search_dir_fwd=(after is None))
#FIXME: Umm... why is this here?
if entity is None :
return
# If we're setting alignment, let the entity know
if align != None :
entity.align = align
# Finally add it to our list of entities
self.entities.append(entity)
# Pick our config into which the child config should go
#FIXME: shouldn't this depend on the spread of after's cfgs?
if after is None or after in self.entities :
config_ind = 0
else :
config_ind = ac.index
# Tell all of the entity's child configs which this is
#FIXME: shouldn't this always be zero? Or will the translate have
#FIXME: potentially resplit it?
for c in entity.config :
entity.config[c].parent_config_index = config_ind
# Finally add the config to the list of child configs
self.config[config_ind].child_configs.append(entity.config[0])
# Ensure the entity config old matches new
entity.config[0].old_sub_pos = entity.config[0].sub_pos
# No idea what this is. Probably completely redundant
global _testing_phr
_testing_phr = self
# If we were a non-zero width blank and are no longer, a non-trivial
# size change has occurred. Do a full recalc
if len(self) == 1 and self.get_p('blank_ratio') > 0.0 :
self.recalc_bbox()
if g.anal_retentive_mode and entity.am('phrase') :
entity.consistency_check_sub_poses()
# Tell the entity who we are
entity.set_parent(self)
if g.anal_retentive_mode :
if entity.am('phrase') :
entity.consistency_check_sub_poses()
self.consistency_check_sub_poses()
# Make sure any inherited properties of ours get transmitted down to the
# entity and to its children, ad nauseum
entity.children_check()
# The only way a size change should be initiated, other than to a
# set_font* call, is here (and remove), where the parent changes. As
# scale works once for all the children, we do not want to call it
# multiple times and therefore call it now rather than inside
# the cascading inherited properties chain
rel_scale = entity.get_scaled_font_size()/rel_scale
if not fc(rel_scale, 1.0) :
entity.scale(rel_scale)
# Get the entity to call our child_bbox_change by pretending to have
# been zero-width and expanding to its full width : "Of course I was
# here all along, I was just hiding!"
entity.from_nought(quiet=quiet)
if g.anal_retentive_mode :
if entity.am('phrase') :
entity.consistency_check_sub_poses()
self.consistency_check_sub_poses()
# Tell any phrases above us that a child has (materially) changed - this
# is saved for heavy duty checks that would be inefficient to call for
# simple resizings
self.child_change()
# If the child is the kind of phrase that wants to pretend it's our
# inside let it (i.e. high-level interior stuff like general child
# adding is directed to it, it directs its high-level exterior stuff to
# us)
if entity.am_c('_in_phrase') :
self.set_in(entity)
entity.set_deletable(2)
self.recalc_bbox()
self.child_change()
# If the entity wants to do anything about the fact it's just been added
# to us, this is the moment.
entity.added()
# Tell our caller which row the entity got added to
return row
def add_config(self, config) :
'''
Sticks another config on the end (and adds a child_configs member if it
        doesn't have one).
'''
self.config[len(self.config)] = config
config.child_configs = []
line_length = -1
wrapped = 0
def word_wrap(self, quiet=False, instigator=True) :
'''
Splits the phrase into multiple configs
'''
# If no parent is forcing us to split and we don't need to (i.e. we
# haven't overrun the line length limit) then do nothing.
#FIXME: wait what's the point of instigator?
if instigator and self.line_length <= 0 :
return
# Break (the relevant) config distance self.line_length from LHS
pr = self.config_break(self.config[0].bbox[0]+self.line_length)
# If we have two halves, it worked, if not, nothing happened.
if pr is not None :
# pr should be two configs - the halves of whatever config we split
# (iirc)
m, n = pr
# note that this implementation can't handle configs spanning
# multiple rows
#FIXME: is this still true?
# As we're word wrapping, we still need to move the second half down
# a row
cfgs = n.child_configs
# Find the last row in the second half
r = max(cfgs, key=lambda a:a.row).row
# Add a new row if necessary
if r+1 not in self.rows :
self.add_row(r+1)
# Move all of the second half configs down a row. The recalc below
# will pull them to the start of the row, so we don't need to worry
# about that.
#FIXME: what if something's on that row?
for cfg in cfgs :
cfg.row += 1
#FIXME: what's this all about?
cfgs = self.sort_configs(config=m.index)
#FIXME: is this necessary?
self.wrapped += 1
# Adjust our own structure accordingly
self.recalc_bbox(quiet=quiet)
# We may need to wrap again, run this to do so if necessary
self.word_wrap(quiet=True)
# Now that we've done all that wrapping, check our bbox, etc.
# structure
self.recalc_bbox(quiet=quiet)
# Tell any parent that we've materially changed
if not quiet :
self.child_altered()
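    # A minimal sketch of how wrapping is expected to be driven from outside
    # (the width value is purely illustrative; line_length is in the same
    # canvas units as the bboxes above):
    #
    #   phrase.line_length = 200    # wrap once wider than 200 units
    #   phrase.word_wrap()          # config_break()s at the limit and moves
    #                               # the overflow configs down a row
    #   phrase.line_length = -1
    #   phrase.config_collapse()    # fold everything back into one config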
def remove(self, entity, override_in=False) :
'''
Take an entity out of the children
'''
entity = entity.OUT()
if self.IN() != self :
self.IN().remove(entity)
return
# Shrink to zero width (forces everything else to align)
entity.squash()
# This should be triggered by squash (if needed?)
if g.anal_retentive_mode :
self.recalc_bbox()
# Take the config and entities out of our lists
c = entity.config.values()
rel_scale = entity.get_scaled_font_size()
self.entities.remove(entity)
for cfg in c :
self.config[cfg.parent_config_index].child_configs.remove(cfg)
cfg.parent_config_index = 0
# Make sure the entity knows it no longer has a parent
entity.set_parent(None)
# Try and remove row & col - presumably (?) this will do nothing if the
# row or col is non-empty
for row in self.rows :
self.remove_row(row)
for col in self.cols :
self.remove_col(col)
# Make sure the entity is in sync with its defaults for any properties
# it had been inheriting from us (esp. font_size)
entity.children_check()
# Return the entity's scale to 1.
rel_scale = entity.get_scaled_font_size()/rel_scale
if not fc(rel_scale, 1.0) :
entity.scale(rel_scale)
# Make sure our structure is adjusted accordingly
self.recalc_bbox()
# Inform any parent of material change
self.child_change()
# This allows us to add a first object and move it to our position
def adopt(self, entity, quiet=False, row=0, go_inside=True) :
"""Clears children and adds entity. Uses IN()."""
# Make sure we have an Entity and, moreover, we have the Outside of it
if entity is None :
return
entity = entity.OUT()
# Make sure we have the Inside of ourselves
if go_inside and self.IN() != self :
self.IN().adopt(entity, quiet)
return
# Empty if necessary
self.empty()
# Append
self.append(entity, quiet=quiet, row=row)
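    # Sketch of the distinction (make_word as used elsewhere in glypher;
    # contents purely illustrative):
    #
    #   phrase.adopt(make_word('x', phrase))    # empties first, phrase -> "x"
    #   phrase.append(make_word('y', phrase))   # adds alongside, phrase -> "xy"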
def find_nearest(self, point, fall_through=True, enterable_parent=False,
row=None, col=None, attachable=False, avoid=None) :
'''
Find the nearest child to a point.
'''
dist = self.find_distance(point)
# If we require an enterable parent but are neither enterable nor
# allowed to look inside our children, we might as well give up now.
if not fall_through and enterable_parent and not self.is_enterable() :
return (-1, self)
# If the point is to our left, or the point is inside us and we can't
# fall through, return ourselves if we may
if (not enterable_parent or self.is_enterable()) and \
(not attachable or self.is_attachable()) and\
(point[0] <= self.config[0].bbox[0] or \
(not fall_through and
(point[0] >= self.config[0].bbox[0] and \
point[0] < self.config[0].bbox[2]) and \
(point[1] >= self.config[0].bbox[1] and \
point[1] <= self.config[0].bbox[3]))) and \
self != avoid :
return (dist, self)
nrtup = (-1, self)
# What is the location of this point when considered inside us?
if self.get_local_space() :
lpoint = (point[0]-self.config[0].bbox[0],
point[1]-self.config[0].bbox[3])
else :
lpoint = point
# Check through child entities
for ent in self.entities :
# If we can't see this entity or it's in the wrong r/c continue
if not ent.get_visible() or \
(row is not None and ent.config[0].row != row) or \
(col is not None and ent.config[0].col != col):
continue
if fall_through :
# Try looking inside this entity
newtup = ent.find_nearest(lpoint, fall_through=True,
enterable_parent=enterable_parent,
attachable=attachable, avoid=avoid)
elif ent != avoid :
# Work out the distance to this entity
newtup = (ent.find_distance(point), ent)
# If this entity is closer than any checked so far, update the
# nearest tuple to be its tuple (if we are allowed this ent)
if (not attachable or newtup[1].is_attachable()) and \
newtup[0] >= 0 and (nrtup[0] < 0 or newtup[0] < nrtup[0]) :
nrtup = newtup
# If we did not find one but can use ourselves, do so
if nrtup[0] == -1 :
if (self.is_enterable() or not enterable_parent) and \
(not attachable or self.is_attachable()) and \
self != avoid :
nrtup = (dist, self)
else :
nrtup = (-1, self)
return nrtup
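    # Usage sketch: the return value is a (distance, entity) pair, where a
    # distance of -1 means nothing suitable was found (the entity returned is
    # then just ourselves). Coordinates here are illustrative:
    #
    #   dist, target = phrase.find_nearest((120.0, 40.0), enterable_parent=True)
    #   found = dist >= 0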
def show_decoration(self) :
'''
Do we show visual decoration for this Phrase?
'''
return Entity.GlypherEntity.show_decoration(self) or\
self.get_p('show_decoration')
def set_active(self, active) :
'''
Set this entity as active (and tell any parent)
'''
self.active = active
self.set_child_active(active, self)
def clear_all_errors(self) :
'''
Recursively hide error messages
'''
Entity.GlypherEntity.clear_all_errors(self)
for ent in self.entities :
ent.clear_all_errors()
def _real_draw(self, cr) :
'''
Do the hard work of drawing
'''
# Handle any error indication
if self.error :
cr.save()
cr.set_source_rgba(1.0, 0.8, 0.4, 1.0)
area=(self.config[0].bbox[0]-2, self.config[0].bbox[2]+2,
self.config[0].bbox[1]-2, self.config[0].bbox[3]+2)
draw.trace_rounded(cr, area, 5)
cr.fill()
cr.restore()
# Don't draw what can't be seen
if not self.get_visible() or self.get_blank() :
return
#FIXME: get rid of this hack - it's probably not even being used
if self.draw_offset != (0,0) :
cr.translate(*self.draw_offset)
# If we're supposed to decorate, do
if self.show_decoration() :
self.decorate(cr)
# Give us a restore point before drawing
cr.save()
# Translate to compensate for local coordinate system if necessary
if self.get_local_space() :
cr.translate(self.config[0].bbox[0], self.config[0].bbox[3])
# Draw each of our children
for ent in self.entities :
ent.draw(cr)
# Restore the context
cr.restore()
#FIXME: see above
if self.draw_offset != (0,0) :
cr.translate(-self.draw_offset[0], -self.draw_offset[1])
# Down cache flag
self.redraw_required = False
def draw(self, cr=None, transparent=True) :
'''
Draw this entity to a context
'''
# If we're caching, load from ImageSurface, otherwise draw straight on
if self.is_caching or cr is None :
# Create image surface if necessary
if self.redraw_required or self.cairo_cache_image_surface is None \
or cr is None:
#FIXME: padding should be settable, rather than constant!!
self.cairo_cache_image_surface = \
cairo.ImageSurface(cairo.FORMAT_ARGB32,
int(self.get_width())+20,
int(self.get_height())+20)
cc = cairo.Context(self.cairo_cache_image_surface)
if not transparent :
cc.set_antialias(cairo.ANTIALIAS_NONE)
cc.set_source_rgb(1.0, 1.0, 1.0)
cc.rectangle(0, 0, int(self.get_width())+20,
int(self.get_height())+20)
cc.fill()
# Move to middle of padded surface
cc.translate(10-self.config[0].bbox[0],
10-self.config[0].bbox[1])
# Do the work
self._real_draw(cc)
#FIXME: redundant?
self.redraw_required = False
# Load from image surface
ci = self.cairo_cache_image_surface
if cr is not None :
cr.save()
cr.translate(self.config[0].bbox[0]-10, self.config[0].bbox[1]-10)
cr.set_source_surface(ci, 0, 0)
cr.paint()
cr.restore()
else :
# Draw straight on
self._real_draw(cr)
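    # Rendering sketch (assumed caller-side setup; sizes illustrative). With
    # is_caching set, draw() paints into a padded ImageSurface once and then
    # blits that surface until redraw_required is raised again:
    #
    #   surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 400, 120)
    #   cr = cairo.Context(surface)
    #   phrase.is_caching = True
    #   phrase.draw(cr)          # first call renders and caches
    #   phrase.draw(cr)          # subsequent calls reuse the cache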
#FIXME: Do we really want to set r,c to 0,0
def config_collapse(self, quiet = False) :
'''
If we are composed of multiple configs, collapse them into one config.
'''
# Pick the config into which everything else will get stuffed
cfg = self.config[0]
if self.get_local_space() :
ol, ob = (self.config[0].bbox[0], self.config[0].bbox[3])
else :
ol, ob = (0,0)
# Collapse all of our children in the primary config first
cl = list(cfg.child_configs)
for c in cl :
c.get_entity().config_collapse(quiet=True)
# Move through our own configs in order
while cfg.next is not None :
pcfg = cfg
cfg = cfg.next
horiz_off = pcfg.bbox[2]-cfg.bbox[0]
# Collapse all of the children in this entity and make sure
# their row and column are 0, 0
cl = list(cfg.child_configs)
for c in cl :
c.get_entity().config_collapse(quiet=True)
c.row = 0; c.col = 0
# Move to end of previous config and vertical middle
# align at end SHOULD tidy up
self.translate(horiz_off, pcfg.basebox[4]-cfg.basebox[4],
quiet=True, after=(horiz_off>0), config=cfg.index)
# Go through and add children of other configs to primary config
cfg = self.config[0]
while cfg.next is not None :
pcfg = cfg; cfg = cfg.next
self.config[0].child_configs += cfg.child_configs
for c in cfg.child_configs :
c.parent_config_index = 0
        # Make sure that any parent isn't hanging on to redundant configs of
        # ours
if self.included() :
for c in self.config :
if c == 0 :
continue
prt_cfg = self.parent.config[self.config[c].parent_config_index]
prt_cfg.child_configs.remove(self.config[c])
# Remove remaining references to old configs
self.config[0].next = None
self.config = {0:self.config[0]}
# Do a recalc to tidy everything into position
self.recalc_bbox(quiet=quiet)
# do_break indicates whether we want to be the top-level config, which
# I think we generally do not
def config_break(self, x_pos, quiet=False, do_break=False) :
'''
Split ourselves around x_pos
'''
# Can we even do this?
bkble = self.get_breakable()
broken = False
        # Transform to local coords if necessary
lx_pos = x_pos
if self.get_local_space() :
lx_pos -= self.config[0].bbox[0]
        # Go through the configs and see if they need to be broken
for c in self.config :
c = self.config[c]
# Does the break-point fall in this config?
if c.get_bbox()[0] < x_pos and c.get_bbox()[2] > x_pos :
# Add a new config (done in init)
ind = len(self.config)
new_cfg = Entity.GlypherConfiguration(self, ind)
if not do_break :
# If we don't want this config inside us, remove from list
del self.config[ind]
# Tell this new config where it is
new_cfg.row = c.row
new_cfg.col = c.col
if do_break :
# Tie into our stream
new_cfg.next = c.next
c.next = new_cfg
# Add the child_configs member as this is a config for a Phrase
new_cfg.child_configs = []
# Find child configs of current config
ccs = list(c.child_configs)
# How many of them do we want to stay in the current (leftward)
# config?
keepers = 0
for cc in ccs :
if cc.get_bbox()[2] < x_pos :
keepers += 1
keeper_shortage = False
                # Run through the child configs to see if they need to be broken
for cc in ccs :
# This one does
if cc.get_bbox()[0] < x_pos and cc.get_bbox()[2] > x_pos :
child_config_halves = \
cc.get_entity().config_break(lx_pos, quiet=True,
do_break=True)
# If we can split this config, put the second half
# into the new config
if child_config_halves is not None :
broken = True
# If we break ourselves, tell the second half of the
                            # broken child config it's in our new config,
                            # otherwise it thinks it's in the current config
if do_break :
child_config_halves[1].parent_config_index = ind
else :
child_config_halves[1].parent_config_index = \
c.index
c.child_configs.append(child_config_halves[1])
# In any case, new_cfg should have it
new_cfg.child_configs.append(child_config_halves[1])
# As long as there are some child configs in our current
# config then, if this didn't split, we can move the
# whole thing to the new config
elif keepers > 0 :
if do_break :
cc.parent_config_index = ind
c.child_configs.remove(cc)
new_cfg.child_configs.append(cc)
# If there aren't, then leave it where it is, otherwise
# we'll end up emptying the current config
else :
keeper_shortage = True
# This falls entirely on the RHS of x_pos - put it in the
# new config
elif cc.get_bbox()[0] >= x_pos :
if do_break :
cc.parent_config_index = ind
c.child_configs.remove(cc)
new_cfg.child_configs.append(cc)
# If we made a successful split, tidy everything up and return
# both halves
if not keeper_shortage and len(new_cfg.child_configs) > 0 and \
(bkble or broken) :
new_cfg.reset()
self.recalc_basebox()
self.recalc_bbox(quiet=quiet, do_reset=quiet)
return (c, new_cfg)
# Otherwise, if the new config has been added, remove it again,
# as we didn't actually split anything
elif do_break :
l = list(new_cfg.child_configs)
for cc in l :
cc.parent_config_index = c.index
new_cfg.child_configs.remove(cc)
c.child_configs.append(cc)
c.next = None
del self.config[ind]
return None
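    # Sketch of the contract (x-position illustrative): a successful split
    # returns the two halves, an unsuccessful one returns None:
    #
    #   halves = phrase.config_break(phrase.config[0].bbox[0] + 150)
    #   if halves is not None :
    #       left_cfg, right_cfg = halves   # right_cfg holds the overflow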
def _adjust_bbox(self, bbox) :
'''
Override this to expand (or contract) bbox after it has been set by
contained elements.
'''
pass
def decorate(self, cr) :
'''
Show frilly bits for interactive mode
'''
# Draw a line around our first config
if len(self.entities) == 0 :
cr.save()
cr.move_to(self.config[0].bbox[0] - 4, self.config[0].bbox[1] - 10)
cr.line_to(self.config[0].bbox[2] + 4, self.config[0].bbox[1] - 10)
cr.line_to(self.config[0].bbox[2] , self.config[0].bbox[1] )
cr.line_to(self.config[0].bbox[0] , self.config[0].bbox[1] )
cr.close_path()
cr.set_source_rgba(0.5, 1.0, 0.5, 0.8)
cr.fill()
cr.restore()
# Show our alignment boxes
if g.show_rectangles :
cr.save()
cr.set_line_width(2.0)
cr.set_source_rgba(1.0, 0.5, 0.5, 0.4)
cr.rectangle(self.config[0].bbox[0]-2, self.config[0].bbox[1]-2,
self.config[0].bbox[2]-self.config[0].bbox[0]+4,
self.config[0].bbox[3]-self.config[0].bbox[1]+4)
cr.stroke()
cr.set_source_rgba(1.0, 0.5, 1.0, 0.4)
for j in (3,4,5) :
cr.move_to(self.config[0].basebox[0]-5,
self.config[0].basebox[j])
cr.line_to(self.config[0].basebox[2]+5,
self.config[0].basebox[j])
for i in (0,2) :
cr.move_to(self.config[0].basebox[i],
self.config[0].basebox[j]-2)
cr.line_to(self.config[0].basebox[i],
self.config[0].basebox[j]+2)
cr.stroke()
cr.restore()
if self.line_length > 0 :
cr.save()
cr.set_source_rgb(0.4, 1.0, 0.4)
cr.move_to(self.config[0].bbox[0] + self.line_length,
self.config[0].basebox[3])
cr.rel_line_to(0, self.get_basebox_height())
cr.stroke()
cr.restore()
self.draw_topbaseline(cr)
# Highlighting
if self.get_attached() and g.additional_highlighting :
cr.save()
cr.move_to(self.config[0].bbox[0]-2, self.config[0].bbox[3]+2)
draw.draw_blush(cr, self.config[0].bbox[2]-self.config[0].bbox[0]+4,
(0.5,0,0) )
cr.restore()
_num_recalcs = 0
#FIXME: The paragraph of comments below is probably out of date - go through
# We don't recalc the baseboxes until the end of this as recalc_bbox
# shouldn't use them and they have a significant overhead. However, if
# anything in here does need a row basebox, this should be considered.
# Actually, inserting one at start of recalc_bbox to be safe. However,
# recalc_row_basebox depends on entity baseboxes and, only if it has a row
# full of invisible entities, row_bboxes so if the second dependency were
# removed, this recalc_basebox call could be moved to child_bbox_changed,
# provided that the one commented out in realign_children is readded
def recalc_bbox(self, quiet=False, enact=True, realign=True,
sub_pos_search_dir=None, compare_rows=False,
in_feed_chain=False, do_reset=True) :
        # Don't allow this to run until we're fully initialized - this is a
        # slightly hacky check, but it's the simplest indicator we have
if self.get_p('blank_ratio') is None :
return
# If we have to realign as well as recalc, check the baseboxes
if realign :
self.recalc_baseboxes()
# If we're actually supposed to do the translation on this call, define
# ourself as the top level. If not, we're in a recursive call -
# increment the nesting counter
if enact :
self._num_recalcs = 0
else :
self._num_recalcs += 1
# This has taken too many recursive calls, give up.
if self._num_recalcs > g.max_recalcs :
raise RuntimeError('Exceeded maximum bbox recalculations for '+\
str(self.format_me())+\
'; probably some unending alignment loop')
# Has a change occurred during this routine?
change = False
# Pick anchor coords
l, b = (self.config[0].bbox[0], self.config[0].bbox[3])
for c in self.config :
config = self.config[c]
bbox = list(config.bbox)
# If this config is empty, give it a bbox of appropriate size and
# shape - blank_ratio tells us what width this should be
if self.get_visible_entities() == 0 :
pad = self.padding
bl_width = self.get_p('blank_ratio')*self.get_scaled_font_size()
bbox = [bbox[0]+pad[0],
bbox[3]-0.6*self.get_scaled_font_size()-pad[3],
bbox[0]+bl_width+pad[0],
bbox[3]-pad[3]]
            # Adjust for local coords if necessary
lbbox = list(bbox)
if self.get_local_space() :
lbbox[0] -= l; lbbox[1] -= b
lbbox[2] -= l; lbbox[3] -= b
self.row_bboxes[0] = list(lbbox)
self.col_bboxes[0] = list(lbbox)
else :
# Pick out the visible child configs of this config
vis_cfgs = filter(lambda c : c.get_entity().get_visible(),
config.child_configs)
# Start with the first child config as a guess for the config
# bbox
bbox = list(vis_cfgs[0].bbox)
row_ticks = []
col_ticks = []
recalc_rows = []
for cfg in vis_cfgs :
ent = cfg.get_entity()
# Extend our bbox around this entity
h_ig = ent.get_horizontal_ignore()
v_ig = ent.get_vertical_ignore()
if not h_ig and cfg.bbox[0] < bbox[0] :
bbox[0] = cfg.bbox[0]
if not v_ig and cfg.bbox[1] < bbox[1] :
bbox[1] = cfg.bbox[1]
if not h_ig and cfg.bbox[2] > bbox[2] :
bbox[2] = cfg.bbox[2]
if not v_ig and cfg.bbox[3] > bbox[3] :
bbox[3] = cfg.bbox[3]
# Factor this into our row & col bboxes
r = cfg.row
if r not in row_ticks :
self.row_bboxes[r] = list(cfg.bbox)
row_ticks.append(r)
else :
if not ent.get_horizontal_ignore() \
and cfg.bbox[0] < self.row_bboxes[r][0] :
self.row_bboxes[r][0] = cfg.bbox[0]
if not ent.get_vertical_ignore() \
and cfg.bbox[1] < self.row_bboxes[r][1] :
self.row_bboxes[r][1] = cfg.bbox[1]
if not ent.get_horizontal_ignore() \
and cfg.bbox[2] > self.row_bboxes[r][2] :
self.row_bboxes[r][2] = cfg.bbox[2]
if not ent.get_vertical_ignore() \
and cfg.bbox[3] > self.row_bboxes[r][3] :
self.row_bboxes[r][3] = cfg.bbox[3]
c = cfg.col
if c not in col_ticks :
self.col_bboxes[c] = list(cfg.bbox)
col_ticks.append(c)
else :
if not ent.get_horizontal_ignore() \
and cfg.bbox[0] < self.col_bboxes[c][0] :
self.col_bboxes[c][0] = cfg.bbox[0]
if not ent.get_vertical_ignore() \
and cfg.bbox[1] < self.col_bboxes[c][1] :
self.col_bboxes[c][1] = cfg.bbox[1]
if not ent.get_horizontal_ignore() \
and cfg.bbox[2] > self.col_bboxes[c][2] :
self.col_bboxes[c][2] = cfg.bbox[2]
if not ent.get_vertical_ignore() \
and cfg.bbox[3] > self.col_bboxes[c][3] :
self.col_bboxes[c][3] = cfg.bbox[3]
min_height = self.get_min_height()
if len(self.rows) == 1 and min_height is not None and bbox[3]-bbox[1] < min_height :
bbox[1] = bbox[3]-min_height
self._adjust_bbox(bbox)
# If necessary account for local coords
if self.get_local_space() :
bbox[0] += l; bbox[1] += b
bbox[2] += l; bbox[3] += b
d = (-1, -1, 1, 1)
for i in range(0, 4) :
bbox[i] += d[i]*self.padding[i]
config.set_bbox(bbox)
# Do any necessary realignment
if realign :
change = self._realign_children()
        # FIXME: This needs to be fixed
#if self.keep_min_row_height :
# if bbox[3] - bbox[1] < self.get_scaled_line_size() :
# bbox[1] = bbox[3] - self.get_scaled_line_size()
# for r in self.rows :
# if self.row_bboxes[r][3] - self.row_bboxes[r][1] < \
# self.get_scaled_line_size() :
# self.row_bboxes[r][1] = self.row_bboxes[r][3] -\
# self.get_scaled_line_size()
#config.update_basebox()
# Do any mandatory entity recalcs. If it changes anything, we'll need to
# run another recalc
entity_recalc_change = False
if realign :
for ent in self.entities :
if ent.get_always_recalc() :
entity_recalc_change = ent.recalc_bbox(quiet=True) or\
entity_recalc_change
#FIXME:isn't this redundant?
if realign and entity_recalc_change :
change = self._realign_children() or change
change = change or entity_recalc_change
# If our reference config isn't at our anchor and we're in local coords,
# then we should move everything to make sure it is
h, v = (self.config[0].bbox[0]-l, self.config[0].bbox[3]-b)
if self.get_local_space() and (h,v) != (0,0) :
ents = [c.get_entity() for c in self.sort_configs(rev=(h>0))]
for ent in ents :
ent.translate(-h, -v, quiet=True)
change = True
# Alignment shouldn't be dependent on the parent's bbox
# If anything changed, recalc
if change :
self.recalc_bbox(quiet=True, enact=False)
        # If we're responsible for actually doing something, do so
if realign and enact :
# Adjust to compensate for padding and visibility
for c in self.config :
config = self.config[c]
config.width = config.bbox[2] - config.bbox[0]
if not self.get_visible() :
config.bbox[2] = config.bbox[0]
if not fc(config.bbox[0], config.old_bbox[0])\
and self.included() :
p = self.get_parent()
if sub_pos_search_dir is None :
sub_pos_search_dir = (config.bbox[0]<config.old_bbox[0])
config.sub_pos = p.get_free_sub_pos(config,
search_dir_fwd=sub_pos_search_dir)
# Recalc alignment box
self.recalc_basebox()
# Does this need to happen if bbox == old_bbox (and baseboxes same,
# I guess)?
if do_reset :
self.feed_up(quiet=quiet, in_feed_chain=in_feed_chain)
return change
def get_min_height(self) :
'''
Get the (scaled) minimum height for this phrase
'''
min_height = self.get_p('min_height')
if min_height is None :
return None
return min_height * self.get_scaled_font_size()
def recalc_basebox(self) :
'''
Recalculate our alignment box
'''
# Do standard stuff
Entity.GlypherEntity.recalc_basebox(self)
# Calc alignment box for each of the configs
for c in self.config :
cfg = self.config[c]
m = cfg.get_basebox()
# If we should be aligning to the middle of the config, do so
if not self.get_p('align_as_entity') and \
0 in self.row_bboxes and \
self.row_bboxes[0] != None and \
self.get_visible_entities() != 0 and \
0 in cfg.baseboxes :
l = cfg.baseboxes[0]
o = self.config[0].bbox[3] if self.get_local_space() else 0
m = (m[0], m[1], m[2], m[3], l[4]+o, m[5])
cfg.basebox = m
def recalc_baseboxes(self, recalc=True) :
'''
Recalculate all of the per row alignment boxes
'''
for r in self.rows :
self.recalc_row_basebox(r, recalc=recalc)
def get_visible_entities(self, r = None) :
'''
Get the visible entities (possibly in a particular row)
'''
if r is None :
ents = self.entities
else :
#FIXME: you wot?
ents = list(set([c.get_entity() for c in self.get_row(r)]))
return len(filter(lambda e: e.get_visible(), ents))
def recalc_row_basebox(self, r, recalc = True) :
'''
Recalculate the row alignment of a given row
'''
# Take care of case where this row is empty of visible elements
if self.get_visible_entities(r) == 0 :
if len(self.rows) == 1 :
if self.get_local_space() :
rbb = (self.padding[0], -self.get_height()+self.padding[1],
self.get_width()-self.padding[2], -self.padding[3])
else :
bbox = self.config[0].bbox
pad = self.padding
rbb = (bbox[0]+pad[0], bbox[1]+pad[1], bbox[2]-pad[2],
bbox[3]-pad[3])
else :
rbb = self.row_bboxes[r]
basebox = (rbb[0], 0.5*(rbb[0]+rbb[2]), rbb[2],
rbb[1], 0.5*(rbb[1]+rbb[3]), rbb[3])
else :
#FIXME: am I missing something or is it possible for c0 to be
#FIXME: invisible and does it matter?
c0 = self.get_row(r)[0]
# Do the start with first and expand around trick
basebox = list(c0.get_basebox())
basebox[1] = 0; basebox[4] = 0
n = 0
for c in self.get_row(r) :
if not c.get_entity().get_visible() :
continue
eb = c.get_basebox()
if eb[0] < basebox[0] : basebox[0] = eb[0]
basebox[1] += eb[1]
if eb[2] > basebox[2] : basebox[2] = eb[2]
if eb[3] < basebox[3] : basebox[3] = eb[3]
basebox[4] += eb[4]
if eb[5] > basebox[5] : basebox[5] = eb[5]
n += 1
# This ensures that the middle and centre alignments are the average
# of the configs
basebox[1] /= n
basebox[4] /= n
# The basebox of the primary config is the de facto basebox of the
# entity (IIRC)
self.config[0].baseboxes[r] = tuple(basebox)
def _realign_children(self) :
'''
Straighten out the kids.
'''
# Have we changed anything?
change = False
# Baseboxes of the primary config are always used for general alignment
baseboxes = self.config[0].get_baseboxes()
cfgs = self.sort_configs()
for cfg in cfgs :
ent = cfg.get_entity()
basebox = baseboxes[cfg.row]
# What way should this be aligned?
al = ent.get_align()
if al[1] == 'b' :
mv = basebox[5]-cfg.get_basebox()[5]
# These tolerances avoid us shuffling back indefinitely to
# converge below a visually distinguishable error (actually we
# need to converge a good bit lower than that, but 1e-5 seems a
# good guess)
if abs(mv) > sm_tol :
ent.translate(0, mv, quiet=True, config=cfg.index)
change = True
elif al[1] == 'm' :
mv = basebox[4]-cfg.get_basebox()[4]
                # Larger tolerance here so that gradual homing-in doesn't slow us down
if abs(mv) > bg_tol :
ent.translate(0, mv, quiet=True, config=cfg.index)
change = True
elif al[1] == 't' :
mv = basebox[3]-cfg.get_basebox()[3]
if abs(mv) > sm_tol :
ent.translate(0, mv, quiet=True, config=cfg.index)
change = True
# Now realign the rows
change = change or self._row_realign()
return change
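    # For reference, the alignment codes used here and in _row_realign below:
    # vertically 't'/'m'/'b' pick basebox[3]/[4]/[5]; horizontally (per row)
    # 'l'/'c'/'r' pick basebox[0]/[1]/[2]. So, for example, an entity with
    #
    #   ent.align = ('l', 'm')
    #
    # is pulled onto the midline (basebox[4]) of its row.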
# assume we want offset from previous col/row towards 0,0
def _row_realign(self) :
'''
...then line them up. Actually, this does columns too
'''
change = False
row_bboxes = self.row_bboxes
col_bboxes = self.col_bboxes
refbbox = row_bboxes[0]
row_baseboxes = self.config[0].get_baseboxes()
refbasebox = row_baseboxes[0]
# Order rows moving away from primary row
self.rows.sort(key=abs)
for r in self.rows :
# Don't touch the primary row
if r == 0 :
continue
# Direction
d = 1 if r < 0 else -1;
# Offset specifies additional gap from previous row - this is why
# order is important
offset = self.row_offsets[r]
if offset is not None :
gap = row_baseboxes[r][4] - row_baseboxes[r+d][4] - offset
else :
gap = row_bboxes[r][2+d] - row_bboxes[r+d][2-d]
# Translate if needs be
if abs(gap) > sm_tol :
change = True
self.row_translate(r, 0, -gap, quiet=True)
# How to align rel to row?
ra = self.row_aligns
if ra[r] == 'l' : mv = refbasebox[0]-row_baseboxes[r][0]
elif ra[r] == 'c' : mv = refbasebox[1]-row_baseboxes[r][1]
elif ra[r] == 'r' : mv = refbasebox[2]-row_baseboxes[r][2]
if abs(mv) > sm_tol :
change = True
self.row_translate(r, mv, 0, quiet=True)
for c in self.cols :
if c == 0 :
continue
d = 1 if c < 0 else -1
offset = self.col_offsets[c]
if offset is not None :
gap = col_bboxes[c][0] - col_bboxes[0][0] - offset
else :
gap = col_bboxes[c][1+d] - col_bboxes[c+d][1-d]
if abs(gap) > sm_tol :
change = True
self.col_translate(c, -gap, 0, quiet=True)
return change
def col_translate(self, c, h, v, quiet=False) :
'''
Move a whole column
'''
for cfg in self.get_col(c) :
cfg.get_entity().translate(h, v, quiet=True, config=cfg.index)
        # This is important as the column may not have any (visible) elements,
        # so an automatic calculation of the col_bboxes will not respond
        # to any translation. Hence, to allow realigns of this column to occur,
        # the translation must be made in the col_bboxes also, so that, if
        # we have no visible elements, we can still resolve the alignment
        # FIXME: put it in - we currently assume every col has a non-zero elt
self.col_bboxes[c][0] += h
self.col_bboxes[c][1] += v
self.col_bboxes[c][2] += h
self.col_bboxes[c][3] += v
if not quiet : # Note that this doesn't reset the config box if quiet
self.feed_up()
def row_translate(self, r, h, v, quiet=False) :
'''
Move a whole row
'''
for c in self.get_row(r) :
c.get_entity().translate(h, v, quiet=True, config=c.index)
# This is important as the row may not have any (visible) elements
# so an automatic calculation of the row_bboxes will not respond
# to any translation. Hence, to allow realigns of this row to occur, the
# translation must be made in the row_bboxes also, so that, if
# we have no visible elements, we can still resolve the alignment
# Should col_bboxes recalc be added here?
self.row_bboxes[r][0] += h
self.row_bboxes[r][1] += v
self.row_bboxes[r][2] += h
self.row_bboxes[r][3] += v
if not quiet : # Note that this doesn't reset the config box if quiet
self.feed_up()
def find_at_point(self, point) :
'''
Get the entity inside here closest to this point
'''
if self.IN() != self :
return self.IN().find_at_point(point)
if not self.contains(point) :
return None
for ent in self.get_entities() :
closest = ent.find_at_point(point)
if closest != None :
return closest
return self
def find_in_dir(self, pos, left=True, exclude=None, row=None, col=None,
vertical_ignore=True, drop_down=True, self_attach=None,
must_move=True) :
'''
Find the first suitable entity moving in direction left/right from pos
'''
if self.IN() != self :
return self.IN().find_in_dir(pos, left, exclude, row, col,
vertical_ignore, drop_down,
self_attach, must_move)
closest = None
d = 1 if left else -1
# If we can't enter this, or look below, there's no point
if not self.is_enterable() and not drop_down :
return None
for sym in self.entities :
# Don't enter what you can't see
if not sym.get_visible() :
continue
# We've been told to exclude this. So do.
if sym == exclude or sym.IN() == exclude :
continue
# We definitely can't attach or go into this
if not (sym.is_attachable() or drop_down) or \
(not self.is_enterable() and not sym.am('phrase')) :
continue
# Wrong row
if row is not None and sym.config[0].row != row:
continue
# Wrong col
if col is not None and sym.config[0].col != col:
continue
got_from_sub_phrase = False
if drop_down and sym.am('phrase') :
test_closest = sym.find_in_dir(\
pos=pos, left=left, exclude=exclude,\
row=None, col=None, vertical_ignore=vertical_ignore,
drop_down=True, self_attach=True, must_move=must_move)
# If we've found a suitable entity in this child and it beats
# out our current closest
if test_closest is not None and \
(closest == None or \
d*(closest.config[0].bbox[2] -\
test_closest.config[0].bbox[2]) < 0 or \
(fc(test_closest.config[0].bbox[2],
closest.config[0].bbox[2]) and\
d*(closest.config[0].bbox[0] -\
test_closest.config[0].bbox[0]) < 0)) :
closest = test_closest
got_from_sub_phrase = True
# If we haven't just picked up a closest and this sym fits better
# than what we have (and we can attach to it where it is), then use
# it as the new closest
if not got_from_sub_phrase and \
(vertical_ignore or \
(sym.config[0].bbox[3] > pos[1]-self.get_height() or \
sym.config[0].bbox[1] < pos[1])) and \
(d*(sym.config[0].bbox[2] - pos[0]) < 0 or \
(fc(sym.config[0].bbox[2],pos[0]) and \
row!=None and col!=None) or \
(fc(sym.config[0].bbox[2],pos[0]) and \
d*(sym.config[0].bbox[0] - pos[0]) < 0)) :
                # Guard: closest may still be unset on the first candidate
                if closest is None :
                    if sym.is_attachable() and self.is_enterable() :
                        closest = sym
                else :
                    right_dist = d*(closest.config[0].bbox[2]-sym.config[0].bbox[2])
                    left_dist = d*(closest.config[0].bbox[0]-sym.config[0].bbox[0])
                    if right_dist < 0 or\
                       (fc(sym.config[0].bbox[2],closest.config[0].bbox[2]) \
                        and left_dist < 0) :
                        if sym.is_attachable() and self.is_enterable() :
                            closest = sym
# Default for self attaching
if self_attach is None :
self_attach = True
# Check whether we are eligible ourselves if nothing else has been found
if closest == None and self_attach and self.is_attachable() and \
exclude != self and self.is_enterable() and \
(d*(self.config[0].bbox[0] - pos[0]) < 0 or \
(not must_move and fc(self.config[0].bbox[0],pos[0]))):
return self
return closest
# Must be a better way!!
#FIXME: is this now redundant?
class GlypherExpression(GlypherPhrase) :
'''
Type of phrase that's got tailored defaults more appropriate to a
full mathematical expression
'''
def __init__(self, parent, area=(0,0,0,0), line_size_coeff=1.0,
font_size_coeff=1.0, align=('l','m'), auto_fices=True) :
GlypherPhrase.__init__(self, parent, area, line_size_coeff,
font_size_coeff, align, auto_fices)
self.mes.append("expression")
class GlypherMainPhrase(GlypherPhrase) :
'''
Phrase that can act as a standalone parent for use in a widget
'''
# This member allows contact with the outside world; if a gobject signal is
# to be fired, the _receiving_ GObject (e.g. GlyphEntry) should do so
signal_recipient = None
def set_is_caching(self, is_caching) :
'''
Whether or not this gets rid of object representation of children in
favour of XML that it can parse, on change, to an ImageSurface
'''
self.is_caching = is_caching
def set_by_bbox(self, by_bbox) :
'''Align by bbox (or basebox)'''
self.set_p('by_bbox', by_bbox)
def get_by_bbox(self) :
'''Align by bbox (or basebox)'''
return self.get_p('by_bbox')
def set_anchor(self, anchor) :
'''Anchor alignment'''
self.set_p('anchor', anchor)
def get_anchor(self) :
'''Anchor alignment'''
return self.get_p('anchor')
def set_anchor_point(self, anchor_point) :
'''Fixed reference point'''
self.set_p('anchor_point', anchor_point)
def get_anchor_point(self) :
'''Fixed reference point'''
return self.get_p('anchor_point')
def set_area(self, area) :
'''
Reset to a given area, which evaluates the anchor point based on the
anchor (alignment).
'''
area = (0, 0, area[0], area[1])
anchor = self.get_anchor()
a1 = area[0] if anchor[0] == 'l' else \
area[2] if anchor[0] == 'r' else \
0.5*(area[0]+area[2])
a2 = area[1] if anchor[1] == 't' else \
area[3] if anchor[1] == 'b' else \
0.5*(area[1]+area[3])
self.set_anchor_point((a1, a2))
# Do any necessary translation
self.move()
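    # Worked example (values illustrative): with the default anchor ('l','c'),
    # set_area((200, 40)) gives area (0, 0, 200, 40), so the anchor point
    # becomes (0, 20) -- the left edge at half-height -- and move() then
    # translates the phrase so its reference point sits there.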
def __init__(self, signal_recipient, line_size, font_size, rgb_colour,
is_decorated=False, anchor=('l','c'), by_first_row=True,
by_bbox=False) :
# This just saves a bit of recalculation (I think?)
trial_area = (0, 0, 0, line_size)
GlypherPhrase.__init__(self, None, trial_area, align=('l','m'))
self.add_properties({'anchor': ('l','c'), 'anchor_point': (0,0),
'deletable' : False, 'by_bbox' : False })
self.set_p('is_decorated', is_decorated)
self.set_by_bbox(by_bbox)
# This member allows us to send rudimentary signals to a widget
if signal_recipient :
self.signal_recipient = lambda s, d=None : signal_recipient(self, s, d)
self.set_anchor(anchor)
self.mes.append('main_phrase')
self.by_first_row = by_first_row
self.set_area((0,line_size))
self.set_line_size(line_size)
self.set_font_size(font_size)
self.set_rgb_colour(rgb_colour)
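    # A minimal construction sketch (arguments illustrative; a real widget
    # such as GlyphEntry would pass its own signal handler instead of None):
    #
    #   main = GlypherMainPhrase(None, line_size=40, font_size=30,
    #                            rgb_colour=(0.0, 0.0, 0.0))
    #   main.adopt(make_word('x', main))
    #   main.set_area((300, 40))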
def to_clipboard(self, auto_paste=False, who=None, contents=False) :
'''
Tell a widget that we have a copy request for who (default None : us?)
'''
if self.signal_recipient :
self.signal_recipient("copy", (auto_paste, who, contents))
return True
return False
def show_decoration(self) :
'''
Is this decorated?
'''
return self.get_p('is_decorated')
def set_is_decorated(self, dec) :
'''
Is this decorated?
'''
if self.show_decoration() != dec :
self.set_p('is_decorated', dec)
self.redraw_required = True
background_colour = None
cairo_cache_image_surface = None
def _real_draw(self, cr) :
'''
If this isn't transparent, give it a bg colour. Then _real_draw as
normal
'''
if self.background_colour is not None :
cr.save()
cr.set_source_rgba(*self.background_colour)
for c in self.config :
cfg = self.config[c]
cr.rectangle(*self.config[c].bbox)
cr.fill()
cr.restore()
if self.in_selected() :
cr.save()
cr.set_source_rgba(*glypher.selected_colour)
for c in self.config :
cfg = self.config[c]
cr.rectangle(*self.config[c].bbox)
cr.fill()
cr.restore()
GlypherPhrase._real_draw(self, cr)
cached_xml = None
def decorate(self, cr) :
'''
Do decoration for MainPhrase
'''
self.draw_topbaseline(cr)
if g.show_rectangles :
# Big surrounding line
cr.save()
cr.set_line_width(4.0)
cr.set_source_rgba(0.3, 0.3, 0.3, 0.6)
cr.rectangle(self.config[0].bbox[0]-2, self.config[0].bbox[1]-2,
self.config[0].bbox[2]-self.config[0].bbox[0]+4,
self.config[0].bbox[3]-self.config[0].bbox[1]+4)
cr.stroke()
cr.restore()
# Cross at the anchor point
cr.save()
cr.set_line_width(2.0)
cr.set_source_rgba(0.3, 0.3, 0.3, 0.8)
a1, a2 = self.get_anchor_point()
cr.move_to(a1 - 10, a2 - 10)
cr.line_to(a1 + 10, a2 + 10)
cr.move_to(a1 + 10, a2 - 10)
cr.line_to(a1 - 10, a2 + 10)
cr.stroke()
cr.restore()
def scale(self, s) :
'''
Execute a scaling, but make sure we also do the subsequent MainPhrase
translation to align to anchor
'''
GlypherPhrase.scale(self, s)
self.move()
def child_change(self) :
'''
If any material changes occur, tell our signal recipient, do any
necessary word wrapping, simplifying and translation to anchor.
'''
self.config[0].check()
self.make_simplifications()
self.word_wrap()
self.move()
if self.signal_recipient :
self.signal_recipient("recalc")
def child_altered(self, child=None) :
'''
If anything happens to a child, we still need to additionally do the
child_change routine for MainPhrase, to make sure all alignment is tidy.
'''
GlypherPhrase.child_altered(self, child)
self.child_change()
# Note that this (currently) aligns to the first row's basebox
def move(self, a1=None, a2=None) :
'''
Realign (by simple translation) to the anchor point. This may involve
aligning using the basebox or bbox, according to settings. Default
alignment is to current anchor point
'''
# Choose appropriate anchor point and
if a1 is None :
a1 = self.get_anchor_point()[0]
if a2 is None :
a2 = self.get_anchor_point()[1]
anchor = self.get_anchor()
if self.get_by_bbox() :
bbox = self.config[0].bbox
bb = (bbox[0], (bbox[0]+bbox[2])*0.5, bbox[2],
bbox[1], (bbox[1]+bbox[3])*0.5, bbox[3])
else :
if self.by_first_row :
bb = self.config[0].baseboxes[0]
else :
bb = self.config[0].basebox
# Which point do we need to translate to anchor?
b1 = bb[
0 if anchor[0]=='l' else\
2 if anchor[0]=='r' else\
1]
b2 = bb[
3 if anchor[1]=='t' else\
5 if anchor[1]=='b' else\
4]
        # Move (if necessary)
if not fc(a1, b1) :
self.translate(a1-b1, 0, quiet=True)
if not fc(a2, b2) :
self.translate(0, a2-b2, quiet=True)
# Update anchor
self.set_anchor_point((a1,a2))
def get_main_phrase(self) :
'''
Override upward calling routine to return ourselves
'''
        return self

# ---- end of aesthete/glypher/Phrase.py : aesthete/glypher/Commands.py follows ----
import sympy
import Plot
import copy
import Entity
from Interpret import *
from BinaryExpression import *
from PhraseGroup import *
try :
from sympy.solvers import ode
except :
print "Version of sympy too old; you may have subsequent problems!"
def get_arg_innards(args) :
    # NOTE: this early return currently bypasses the per-argument unwrapping
    # below, so callers receive the target phrases themselves rather than
    # their single inner entities.
    return args
    inns = []
for arg in args :
if len(arg.get_entities()) != 1 :
return "Argument "+str(args.index(arg))+ " '"+str(arg.to_string())+"' contains multiple/zero entities"
inns.append(arg.get_entities()[0])
return inns
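# Sketch of the intended unwrapping (currently short-circuited by the early
# return above; the 'phrase_*' and 'word_*' names are hypothetical): each
# argument is a target phrase expected to contain exactly one entity, e.g.
#
#   get_arg_innards((phrase_x, phrase_for, phrase_t))
#   -> [word_x, word_for, word_t]          # or an error string on failure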
def source(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) != 1 :
return "Too many arguments"
ret = Plot.make_source(args[0], caret)
return make_word('Successfully sourced' if ret is not None else 'Source-making unsuccessful', None)
def separate(join, caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) == 3 and args[1].to_string() == u'wrt' :
wrt = args[2]
sym = wrt.get_sympy()
if not isinstance(sym, Dynamic.Symbol) :
return "WRT not a sympy symbol (usu. a GlypherWord or subscripted Word)"
op = Dynamic.together if join else Dynamic.apart
return interpret_sympy(None, op(args[0].get_sympy(), sym))
elif join and len(args) == 1 :
return interpret_sympy(None, Dynamic.together(args[0].get_sympy()))
return "Wrong arguments"
def plot(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) == 1 :
ret = Plot.make_plot(args[0], caret)
elif len(args) == 3 and args[1].to_string() == u'for' and \
args[2].am('elementof') and\
args[2].poss[1].get_entities()[0].am('interval') :
interval = args[2].poss[1].get_entities()[0]
debug_print(interval.format_me())
ret = Plot.make_plot(args[0], caret,
(args[2].poss[0].get_sympy(),
interval.get_lhs().get_sympy(),
interval.get_rhs().get_sympy()))
#ret = Plot.make_plot(args[0], caret)
else :
return "Wrong arguments"
return make_word('Plotted' if ret else 'Could not plot', None)
def let(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) >2 and args[1].to_string() == u'be':
func = args[0]
sym = func.get_sympy()
if not isinstance(sym, Dynamic.Symbol) :
return "LHS not a sympy symbol (usu. a GlypherWord or subscripted Word)"
if args[2].to_string() in (u'wildcard', u'wild', u'w') :
excl = {}
if len(args) == 5 and args[3].to_string() == u'excluding' :
sy = args[4].get_sympy()
excl['exclude'] = sy if isinstance(sy, list) else [sy]
debug_print(excl)
g.define_symbols[sym] = \
Dynamic.Wild(str(func._get_symbol_string(sub)), **excl)
return GlypherSpaceArray(None, lhs=make_word('Defined', None),
rhs=GlypherEntity.xml_copy(None, args[0]))
if args[2].to_string() in (u'indexed', u'tensor', u'I') :
shape = {}
if len(args) == 4 :
sy = args[3].get_sympy()
shape['shape'] = sy if isinstance(sy, list) else [sy]
g.define_symbols[sym] = \
Dynamic.IndexedBase(str(func._get_symbol_string(sub)), **shape)
return GlypherSpaceArray(None, lhs=make_word('Defined', None),
rhs=GlypherEntity.xml_copy(None, args[0]))
if args[2].to_string() in (u'index', u'i') :
shape = {}
if len(args) == 4 :
sy = args[3].get_sympy()
shape['range'] = sy
g.define_symbols[sym] = \
Dynamic.Idx(str(func._get_symbol_string(sub)), **shape)
return GlypherSpaceArray(None, lhs=make_word('Defined', None),
rhs=GlypherEntity.xml_copy(None, args[0]))
return "Don't know that type"
if len(args) != 1 :
return "Too many arguments without 'be'"
eq = args[0]
if eq.am('binary_expression') and eq.get_symbol_shape() == '=' :
lhs = eq['pos0']
rhs = eq['pos2']
if len(lhs.get_entities()) != 1 or len(rhs.get_entities()) != 1 :
return "LHS or RHS wrong length"
lhs = lhs.get_entities()[0]
rhs = rhs.get_entities()[0]
try :
lhs_sym = lhs.get_sympy()
except :
lhs_sym = None
if isinstance(lhs_sym, Dynamic.Symbol) :
if rhs.am('matrix') :
g.let_matrices[lhs_sym] = rhs.get_sympy()
return GlypherSpaceArray(None, lhs=make_word('Defined', None), rhs=lhs.copy())
if rhs.am('function') :
func_sym = rhs['name'].get_sympy()
if func_sym != lhs_sym :
return "Expect function name to match declared function"
if not isinstance(func_sym, Dynamic.Symbol) :
return "Function name not a sympy symbol (usu. a GlypherWord or subscripted Word)"
for arg in rhs.get_args() :
if not isinstance(arg.get_sympy(), Dynamic.Symbol) :
return "All arguments should give Sympy symbols"
g.define_functions[func_sym] = rhs.get_sympy_args()
return GlypherSpaceArray(None, lhs=make_word('Defined', None),
rhs=rhs['name'].get_entities()[0].copy())
if lhs.am('function') :
func_sym = lhs['name'].get_sympy()
if not isinstance(func_sym, Dynamic.Symbol) :
return "Function name not a sympy symbol (usu. a GlypherWord or subscripted Word)"
for arg in lhs.get_args() :
if not isinstance(arg.get_sympy(), Dynamic.Symbol) :
return "All arguments should give Sympy symbols"
g.let_functions[lhs['name'].get_sympy()] = \
Dynamic.Lambda(lhs.get_sympy_args(), rhs.get_sympy())
return GlypherSpaceArray(None, lhs=make_word('Defined', None), rhs=lhs.copy())
elif eq.am('elementof') :
contained_in = eq['pos2'].get_entities()[0]
if contained_in.am('realR' ) :
assumptions = { 'real' : True }
elif contained_in.am('complexC' ) :
assumptions = { 'complex' : True }
elif contained_in.am('rationalQ' ) :
assumptions = { 'rational' : True }
else :
raise RuntimeError('Unrecognized set')
g.define_symbols[eq['pos0'].get_sympy()] = \
Dynamic.Symbol(str(eq['pos0'].get_entities()[0]._get_symbol_string(sub)),
**assumptions)
return GlypherSpaceArray(None, lhs=make_word('Defined', None),
rhs=eq['pos0'].get_entities()[0].copy())
return "Need e.g. function with Word as name for LHS"
def match(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) :
return args
if len(args) != 3 or args[1].to_string() != u'in' :
return "Format for match is 'Match A in B'"
resp = args[2].get_sympy().match(args[0].get_sympy())
if resp is not None :
return interpret_sympy(None, resp)
else :
return GlypherSpaceArray(None, lhs=make_word('No', None),
rhs=make_word('match', None))
def unlet(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) :
return args
func = args[0]
sy = func.get_sympy(ignore_func=True)
if sy in g.define_symbols :
del g.define_symbols[sy]
elif sy in g.define_functions :
del g.define_functions[sy]
elif sy in g.let_functions :
del g.let_functions[sy]
else :
return GlypherSpaceArray(None, lhs=make_word('Not', None),
rhs=make_word('declared', None))
return GlypherSpaceArray(None, lhs=make_word('Undeclared', None),
rhs=GlypherEntity.xml_copy(None, func))
def define(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
func = args[0]
if func.am('function') :
func_sym = func['name'].get_sympy()
if not isinstance(func_sym, Dynamic.Symbol) :
return "Function name not a sympy symbol (usu. a GlypherWord or subscripted Word)"
for arg in func.get_args() :
if not isinstance(arg.get_sympy(), Dynamic.Symbol) :
return "All arguments should give Sympy symbols"
        g.define_functions[func_sym] = func.get_sympy_args()
return GlypherSpaceArray(None, lhs=make_word('Defined', None),
rhs=func['name'].get_entities()[0].copy())
return "Did not fit a known Define format"
def set_equal(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) != 1 :
return "Too many arguments"
eq = args[0]
if not eq.am('binary_expression') or eq.get_symbol_shape() != '=' :
return "Expected equality"
lhs = eq.get_target('pos0')
rhs = eq.get_target('pos2')
if len(lhs.get_entities()) != 1 or len(rhs.get_entities()) != 1 :
return "LHS or RHS wrong length"
lhs = lhs.get_entities()[0]
sym = lhs.get_sympy()
if not isinstance(sym, Dynamic.Symbol) :
return "LHS not a sympy symbol (usu. a GlypherWord or subscripted Word)"
#sym = sympy.Symbol(str(lhs.to_string()))
g.var_table[sym] = rhs.get_entities()[0].get_sympy()
lhs = lhs.copy(); lhs.orphan(); lhs.set_parent(None)
return make_phrasegroup(None, 'equality', (lhs, interpret_sympy(None,
g.var_table[sym])))
def unset_equal(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) != 1 :
return "Too many arguments"
sym = args[0]
symp = sym.get_sympy(sub=False)
    # Check membership before touching the table (avoids a KeyError)
    if not isinstance(symp, Dynamic.Symbol) or symp not in g.var_table :
        return "Unrecognized operand"
    debug_print(g.var_table[symp])
    del g.var_table[symp]
return make_word('Success!', None)
def doit(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
g.dit = args[0].am('Integral')
if len(args) != 1 :
return "Too many arguments"
return interpret_sympy(None, args[0].get_sympy().doit())
def solve(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
debug_print(map(str, args))
if len(args) != 3 :
return "Need 3 arguments"
if args[1].to_string() != 'for' :
return "Need 'for'"
if isinstance(args[2].get_sympy(), Dynamic.Function) :
if not args[0].am('equality') :
return "Need equality as first argument"
for_sym = args[2].get_sympy()
sy = Dynamic.dsolve(args[0]['pos0'].get_sympy()-args[0]['pos2'].get_sympy(), args[2].get_sympy())
else :
if not (args[0].am('equality') or \
args[0].am('semicolon_array') or args[0].am('comma_array')) :
return "Need equality or array as first argument"
sy = Dynamic.solve(args[0].get_sympy(), args[2].get_sympy())
debug_print(sy)
return interpret_sympy(None, sy)
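# Usage sketch (indicative plain-text forms of the parsed input):
#
#   solve x**2 - 4 = 0 for x      -> Dynamic.solve(...), roots [-2, 2]
#   solve f'(t) = f(t) for f(t)   -> Dynamic.dsolve(...), f(t) = C1*exp(t)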
def diff(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) != 3 or args[1].to_string() != 'by' :
return "Wrong arguments"
return interpret_sympy(None, args[0].get_sympy().diff(args[2].get_sympy()))
def sub(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) != 3 :
return "Wrong argument number"
if args[1].to_string() != 'into' :
return "Incorrect syntax; don't forget 'into'"
if not args[0].am('equality') :
return "First argument should be an equality"
subargs = get_arg_innards(args[0].get_args())
    if isinstance(subargs, str) : return subargs
return interpret_sympy(None, args[2].get_sympy().subs(subargs[0].get_sympy(), subargs[1].get_sympy()))
def series(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) == 6 :
if args[1].to_string() != 'about' :
return "Incorrect syntax; don't forget 'about'"
if args[3].to_string() != 'to' :
return "Incorrect syntax; don't forget 'to'"
if args[4].to_string() != 'order' :
return "Incorrect syntax; don't forget 'order'"
if not args[2].am('equality') :
return "Second argument should be an equality"
        if not (args[5].am('word') and args[5].is_num()) :
return "Third argument should be an integer"
arg_about = 2; arg_order = int(args[5].to_string())
elif len(args) == 3 and args[1].to_string() == 'about' :
if not args[2].am('equality') :
return "Second argument should be an equality"
arg_about = 2; arg_order = 4
elif len(args) == 3 :
arg_about = 1; arg_order = int(args[2].to_string())
elif len(args) == 2 :
arg_about = 1; arg_order = 4
elif len(args) == 1 :
return interpret_sympy(None, args[0].get_sympy().expand(**g.expand))
else :
return "Wrong argument number"
subargs = get_arg_innards(args[arg_about].get_args())
    if isinstance(subargs, str) : return subargs
debug_print((str(args[0].get_sympy()), str(subargs[0].get_sympy()), subargs[1].get_sympy().evalf(), arg_order))
return interpret_sympy(None, args[0].get_sympy().series(subargs[0].get_sympy(), subargs[1].get_sympy().evalf(), arg_order))
def operation_command(function, caret, *args) :
'''Pass the remainder of a SpaceArray as a series of sympy args to a
function.'''
args = get_arg_innards(args)
if isinstance(args, str) :
return args
sympy_args = [arg.get_sympy() for arg in args]
sympy_resp = function(*sympy_args)
return interpret_sympy(None, sympy_resp)
def do_substitutions_from_list(expr, csl) :
debug_print(expr)
if csl.am('equality') :
subargs = get_arg_innards(csl.get_args())
return expr.get_sympy().subs(subargs[0].get_sympy(), subargs[1].get_sympy())
elif csl.am('comma_array') :
subargs = get_arg_innards(csl.get_args())
expr = expr.get_sympy()
for a in subargs :
if not a.am('equality') : return "Should be a comma-separated list of equalities!"
subsubargs = get_arg_innards(a.get_args())
debug_print(expr)
expr = expr.subs(subsubargs[0].get_sympy(),
subsubargs[1].get_sympy())
return expr
else :
return "Expected an equality, or comma-separated list of equalities"
def evalf(caret, *args) :
args = get_arg_innards(args)
if isinstance(args, str) : return args
if len(args) == 1 :
return interpret_sympy(None, args[0].get_sympy().evalf())
elif len(args) == 3 and args[1].to_string() == 'at' :
dsfl = do_substitutions_from_list(args[0], args[2])
        return dsfl if isinstance(dsfl, str) else interpret_sympy(None, dsfl.evalf())
else : return "Wrong number of arguments or need an 'at'"
def limit(caret, *args) :
    return None

# ---- end of aesthete/glypher/Commands.py : aesthete/glypher/ComplexPlane.py follows ----
from Entity import *
import math
import Dynamic
class GlypherPlane(GlypherEntity) :
coords = (0,0)
size = 3
def set_coords(self, coords) : self.coords = coords
def get_coords(self) : return self.coords
def recalc_bbox(self, quiet = False) :
self.cast()
return GlypherEntity.recalc_bbox(self, quiet=quiet)
def get_xml(self, name = None, top = True, targets = None, full=False) :
root = GlypherEntity.get_xml(self, name, top, full=full)
root.set('x', str(self.get_coords()[0]))
        root.set('y', str(self.get_coords()[1]))
return root
def cast(self) :
self.set_ref_width(self.size*self.get_scaled_line_size())
self.set_ref_height(self.size*self.get_scaled_line_size())
def to_string(self, mode = "string") : return unicode("("+str(self.get_coords()[0])+","+str(self.get_coords()[1])+")");
def __init__(self, parent = None, coords=(1.0,1.0), x_string = None, y_string = None) :
GlypherEntity.__init__(self, parent)
self.add_properties({'is_wordlike' : True})
self.x_string = x_string
self.y_string = y_string
self.config[0].bbox[0] = 0
self.config[0].bbox[1] = 0
self.mes.append('plane')
self.set_attachable(True)
self.set_coords(coords)
self.recalc_bbox()
def draw(self, cr) :
if not self.get_visible() : return
cr.save()
cr.set_source_rgba(0,0,0,0.2)
l, t = [self.config[0].bbox[i]+self.padding[i] for i in (0,1)]
cr.rectangle(l, t,
self.get_ref_width() - self.padding[2], self.get_ref_height()-self.padding[3])
cr.stroke()
c1, c2 = (l+self.get_ref_width()/2, t+self.get_ref_height()/2)
plane_radius = max(*map(abs, self.get_coords()))
pr, pe = math.frexp(plane_radius); pe = pe*math.log(2)/math.log(10)
plane_radius = int(pr*10 + 3)/10. * pow(10, pe)
cr.save()
cr.translate(c1, c2)
s = self.get_height()/(2*plane_radius)
cr.scale(s, -s)
cr.set_line_width(plane_radius/20)
cr.move_to(-plane_radius, 0)
cr.line_to(plane_radius, 0)
cr.stroke()
cr.move_to(0, -plane_radius)
cr.line_to(0, plane_radius)
cr.stroke()
d = self.get_coords()
cr.set_source_rgba(0.0, 0.3, 0.3, 0.75)
cr.set_font_size(plane_radius/5)
mark = self.get_coords()[0]
mark, x_string = self._coord_to_string(mark) if self.x_string is None else (mark, self.x_string)
cr.move_to(mark, -plane_radius/20)
cr.line_to(mark, plane_radius/20)
cr.move_to(mark, 2*plane_radius/20)
cr.scale(1, -1)
cr.show_text(x_string)
cr.stroke()
cr.scale(1, -1)
mark = self.get_coords()[1]
mark, y_string = self._coord_to_string(mark) if self.y_string is None else (mark, self.y_string)
cr.move_to(-plane_radius/20, mark)
cr.line_to( plane_radius/20, mark)
cr.move_to(2*plane_radius/20, mark)
cr.scale(1, -1)
cr.show_text(y_string)
cr.stroke()
cr.scale(1, -1)
#debug_print(plane_radius)
cr.arc(d[0], d[1], plane_radius/30, 0, 2*math.pi)
cr.set_source_rgb(0, 1, 0)
cr.fill()
cr.restore()
cr.restore()
def _coord_to_string(self, y) :
if (abs(y) >= 0.01 and abs(y) < 1000) :
if abs(y) >= 100 :
string = ("%.0f"% y)
elif abs(y) >= 10 :
string = ("%.1f"% y)
elif abs(y) >= 1 :
string = ("%.2f"% y)
else :
string = ("%.3f"% y)
else :
string = ("%.1e" % y)
return (float(string), string)
class GlypherComplexPlane(GlypherPlane) :
def __init__(self, parent = None, z=complex(1.0), re_string = None, im_string = None) :
GlypherPlane.__init__(self, parent, coords=(z.real, z.imag), x_string=re_string, y_string=im_string)
self.mes.append('complex_plane')
def get_sympy(self) :
z = self.get_coords()[0] + Dynamic.I*self.get_coords()[1]
return Dynamic.sympify(str(z))
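# Construction sketch (parent handling as for other glypher entities; the
# value is illustrative):
#
#   plane = GlypherComplexPlane(None, z=complex(0.5, -1.2))
#   plane.get_sympy()    # -> 0.5 - 1.2*I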
g.add_phrasegroup_by_class('plane', GlypherPlane)
g.add_phrasegroup_by_class('complex_plane', GlypherComplexPlane)

# ---- end of aesthete/glypher/ComplexPlane.py : aesthete/glancer/Scatter.py follows ----
import os, math, sys, getopt, string
import pango
import random
from gtk import gdk
from ..tablemaker import PreferencesTableMaker
from aobject.utils import debug_print
import threading
import cairo, gtk, gobject
import matplotlib
import numpy, numpy.fft
import scipy, scipy.interpolate, scipy.optimize
from matplotlib.backends.backend_cairo import RendererCairo
from matplotlib.backends.backend_gtkcairo import FigureCanvasGTKCairo as mpl_Canvas
from matplotlib.backends.backend_gtkcairo import NavigationToolbar2Cairo as mpl_Navbar
import pylab
from aobject.aobject import string_to_float_tup
from PIL import Image
from Line import GlancerLine
class GlancerScatter(GlancerLine) :
def __init__(self, plot, line = None, source = None, env=None,
read_series=False, axes = None, aname="GlancerScatter"):
GlancerLine.__init__(self, plot, line=line, source=source, env=env,
read_series=read_series, axes=axes, aname=aname)
def _line_from_source(self) :
if self.source is None :
return
x_range = self.axes.get_xlim() if self.source.needs_x_range else None
values = self.source.source_get_values(multi_array=True, x_range=x_range)[0]
points = values['values']
series = range(0, len(points[0]))
dim = self.source.source_get_max_dim()
if dim > 1 :
trans = zip(*points)
if not self.read_series :
trans[0] = series
else :
trans = [series, points]
if self.line is None :
if self.source.source_type() == 'line' :
self.line = self.axes.plot(trans[0], trans[1])[0]
else :
self.line = self.axes.scatter(trans[0], trans[1])
else :
self.line.set_xdata(trans[0])
self.line.set_ydata(trans[1])
self.axes.figure.canvas.draw()
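    # Note: the dispatch above follows the source's declared type -- a 'line'
    # source is drawn with axes.plot() (returning a Line2D, so replotting can
    # use set_xdata/set_ydata), anything else with axes.scatter() (returning a
    # PathCollection, which is why the 3D subclass below rebuilds the artist
    # instead of updating it).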
#PROPERTIES
def get_aesthete_properties(self):
return { 'label' : [self.change_label, self.get_label, True],
'colour' : [self.change_colour, self.get_colour, True],
'source' : [None, self.get_source, True] }
    def get_colour(self, val=None):
        # take the RGB triple (first three components of the RGBA facecolor)
        return tuple(self.line.get_facecolor().tolist()[0][0:3]) \
               if val==None else string_to_float_tup(val)
def change_colour(self, val) : self.line.set_facecolor(val); self.redraw()
def get_method_window(self) :
#frame = gtk.Frame(self.source)
hbox = gtk.HBox()
config_butt = gtk.Button(self.get_aname_nice()+'...')
config_butt.set_relief(gtk.RELIEF_NONE)
self.connect("aesthete-aname-nice-change", lambda o, a, v : self.set_label_for_button(config_butt,v))
self.connect("aesthete-aname-nice-change", lambda o, a, v : self.set_label_for_button(config_butt,v))
hbox.pack_start(config_butt)
remove_butt = gtk.Button(); remove_butt.add(gtk.image_new_from_stock(gtk.STOCK_CLEAR, gtk.ICON_SIZE_SMALL_TOOLBAR))
remove_butt.connect("clicked", lambda o : self.self_remove())
hbox.pack_start(remove_butt, False)
win = gtk.VBox()
label_ameu = self.aes_method_entry_update("label", "Label")
win.pack_start(label_ameu, False)
colour_hbox = gtk.HBox()
colour_label = gtk.Label("Set colour")
debug_print(self.aesthete_properties[('colour', self.get_aname())][1]())
colour_amcb = self.aes_method_colour_button("colour", "Face Colour")
colour_hbox.pack_start(colour_label); colour_hbox.pack_start(colour_amcb)
win.pack_start(colour_hbox, False)
if self.source is not None:
update_butt = gtk.Button("Replot")
update_butt.connect("clicked", lambda e : self.replot())
win.pack_start(update_butt, False)
self.connect("aesthete-property-change",
lambda o, p, v, a : self.set_label_for_button(config_butt))
win.show_all()
if self.env and self.env.action_panel :
win.hide()
win.aes_title = "Configure scatter plot"
config_butt.connect("clicked", lambda o : self.env.action_panel.to_action_panel(win))
else :
config_win = gtk.Window()
config_win.set_title("Configure scatter plot")
remove_butt = gtk.Button("Close"); remove_butt.connect("clicked", lambda o : config_win.hide())
win.pack_start(remove_butt)
config_win.add(win)
config_win.hide()
config_butt.connect("clicked", lambda o : config_win.show())
hbox.show_all()
#frame.add(win)
return hbox
class GlancerScatter3D(GlancerScatter) :
def __init__(self, plot, line = None, source = None, env=None,
read_series=False, axes = None, aname="GlancerScatter3D"):
GlancerScatter.__init__(self, plot, line=line, source=source, env=env,
read_series=read_series, axes=axes, aname=aname)
def _line_from_source(self) :
if self.source is None :
return
x_range = self.axes.get_xlim() if self.source.needs_x_range else None
values = self.source.source_get_values(multi_array=True, x_range=x_range)[0]
points = values['values']
series = range(0, len(points[0]))
dim = self.source.source_get_max_dim()
if dim > 1 :
trans = zip(*points)
if not self.read_series :
trans[0] = series
else :
trans = [series, points]
trans = (trans[0], trans[1], 0 if dim < 3 else trans[2])
self.line = self.axes.scatter(trans[0], trans[1], trans[2])
# Can't replot as no set_zdata :(
        self.axes.figure.canvas.draw()

# --- aesthete/glancer/Line.py ---
import os, math, sys, getopt, string
import pango
import random
from gtk import gdk
from ..tablemaker import PreferencesTableMaker
import threading
import cairo, gtk, gobject
import matplotlib
import numpy, numpy.fft
import scipy, scipy.interpolate, scipy.optimize
from matplotlib.backends.backend_cairo import RendererCairo
from matplotlib.backends.backend_gtkcairo import FigureCanvasGTKCairo as mpl_Canvas
from matplotlib.backends.backend_gtkcairo import NavigationToolbar2Cairo as mpl_Navbar
from matplotlib import cm
import pylab
from PIL import Image
from aobject.aobject import *
from ..glypher.Widget import GlyphImage
from ..glypher.Parser import parse_phrasegroup
class GlancerLineLike(AObject) :
time = None
def get_auto_aesthete_properties(self) :
return {'time' : (string_to_float_tup, (AOBJECT_CAN_NONE,)) }
def change_time(self, time) :
self.time = time
self.replot()
def get_time(self) :
return self.time
def __init__(self, plot, line = None, source = None, resolution=None, env=None,
read_series=False, axes = None, aname="GlancerLine", time=None):
self.read_series = read_series
self.axes = axes
self.resolution = resolution
self.time = time
if line is None and source is not None :
self.source = source
self._line_from_source()
elif source is None and line is not None :
self.line = line
else :
raise RuntimeError('Must specify one of line and source for Line')
self.plot = plot
self.source = source
AObject.__init__(self, aname, env, elevate=False)
        # If constructed from a pre-built mpl line there is no source to read
        # label/XML metadata from, so only do this when a source is attached.
        if self.source is not None :
            self.set_aesthete_xml(self.source.get_aesthete_xml())
            default_label = self.source.get_aname_nice()
            if self.source.get_aesthete_xml() is None and len(default_label)>20 :
                loc = string.rfind(default_label, ' ', 0, 20)
                if loc != -1 :
                    default_label = default_label[0:loc]+"..."
            self.change_property("label", default_label)
def set_resolution(self, res) :
self.change_property('resolution', res)
def replot(self) :
self._line_from_source()
def aes_get_parameters(self) :
d = AObject.aes_get_parameters(self)
d.update({'source' : self.source})
return d
def redraw(self) :
self.plot.do_legend()
self.line.figure.canvas.draw()
def self_remove(self, parent_remove = True) :
if (parent_remove) : self.plot.lines.remove(self); self.plot.check_legend()
if self.line in self.line.axes.lines :
self.line.axes.lines.remove(self.line)
self.aes_remove()
if (parent_remove) : self.redraw()
def set_label_for_button(self, button, obj=None) :
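        # Relabel a config button from an AObject: if it carries an XML
        # representation, render it as a GlyphImage; otherwise fall back to
        # its nice name, ellipsized and tinted to the line colour.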
text = None
if obj is not None :
rep = obj.get_arepr()
if isinstance(rep, ET._ElementTree) :
button.remove(button.get_child())
ge = GlyphImage()
ge.set_xml(rep)
ge.show()
button.add(ge)
return
text = obj.get_aname_nice()
if text is not None :
button.set_label(text)
button.get_child().props.ellipsize = pango.ELLIPSIZE_END
button.get_child().modify_fg(gtk.STATE_NORMAL,
gtk.gdk.Color(*self.get_colour()))
button.set_tooltip_text(text)
class GlancerLine(GlancerLineLike) :
line = None
plot = None
source = None
resolution = None
def get_useful_vars(self) :
return {
'line' : 'mpl Line',
}
def _line_from_source(self) :
if self.source is None :
return
x_range = self.axes.get_xlim() if self.source.needs_x_range else None
values = self.source.source_get_values(multi_array=True,
x_range=x_range,
time=self.get_time(),
resolution=self.get_resolution())[0]
points = values['values']
series = values['x']
dim = self.source.source_get_max_dim()
trans = [series, points]
if self.line is None :
self.line = self.axes.plot(trans[0], trans[1])[0]
else :
self.line.set_xdata(trans[0])
self.line.set_ydata(trans[1])
self.axes.figure.canvas.draw()
def __init__(self, plot, line = None, source = None, resolution=None, env=None,
read_series=False, axes = None, aname="GlancerLine", time=None):
GlancerLineLike.__init__(self, plot, line=line, source=source,
resolution=resolution, env=env,
read_series=read_series, axes=axes, aname=aname, time=time)
#PROPERTIES
def get_aesthete_properties(self):
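        # Each entry appears to map a property name to
        # [change_callback, getter/parser, persist-in-session?].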
return { 'label' : [self.change_label, self.get_label, True],
'marker' : [self.change_marker, self.get_marker, True],
'linewidth' : [self.change_linewidth, self.get_linewidth, True],
'colour' : [self.change_colour, self.get_colour, True],
'source' : [None, self.get_source, True],
'markersize' : [self.change_markersize, self.get_markersize, True],
'markevery' : [self.change_markevery, self.get_markevery, True],
'markerfacecolor' : [self.change_markerfacecolor, self.get_markerfacecolor, True],
'alpha' : [self.change_alpha, self.get_alpha, True],
'zorder' : [self.change_zorder, self.get_zorder, True],
'visible' : [self.change_visible, self.get_visible, True],
'aa' : [self.change_aa, self.get_aa, True],
'linestyle' : [self.change_linestyle, self.get_linestyle, True],
'resolution' : [self.change_resolution, self.get_resolution, True],
'solid-capstyle' : [self.change_solid_capstyle, self.get_solid_capstyle, True],
'solid-joinstyle' : [self.change_solid_joinstyle, self.get_solid_joinstyle, True],
'dash-capstyle' : [self.change_dash_capstyle, self.get_dash_capstyle, True],
'dash-joinstyle' : [self.change_dash_joinstyle,
self.get_dash_joinstyle, True] }
#BEGIN PROPERTIES FUNCTIONS
def get_label(self, val=None): return self.line.get_label() if val==None else val
def get_marker(self, val=None): return self.line.get_marker() if val==None else val
def get_linewidth(self, val=None): return self.line.get_linewidth() if val==None else float(val)
def get_colour(self, val=None):
return mpl_to_tuple(self.line.get_color()) \
if val==None else string_to_float_tup(val)
def get_source(self, val=None) : return self.source if val==None else val
def get_markersize(self, val=None): return self.line.get_markersize() if val==None else float(val)
def get_markevery(self, val=None): return self.line.get_markevery() \
if val==None else (1 if val is None else int(val))
def get_markerfacecolor(self, val=None):
return mpl_to_tuple(self.line.get_markerfacecolor()) \
if val==None else string_to_float_tup(val)
def get_alpha(self, val=None): return self.line.get_alpha() if val==None else float(val)
def get_zorder(self, val=None): return self.line.get_zorder() if val==None else float(val)
def get_visible(self, val=None): return self.line.get_visible() if val==None else (val=='True')
def get_aa(self, val=None): return self.line.get_aa() if val==None else (val=='True')
def get_linestyle(self, val=None): return self.line.get_linestyle() if val==None else val
def get_solid_capstyle(self, val=None): return self.line.get_solid_capstyle() if val==None else val
def get_solid_joinstyle(self, val=None): return self.line.get_solid_joinstyle() if val==None else val
def get_dash_capstyle(self, val=None): return self.line.get_dash_capstyle() if val==None else val
def get_dash_joinstyle(self, val=None): return self.line.get_dash_joinstyle() if val==None else val
def get_resolution(self, val=None): return self.resolution if val==None else int(val)
def change_resolution(self, val) : self.resolution = val; self.replot()
def change_label(self, val) : self.line.set_label(val); self.set_aname_nice(val); self.plot.do_legend(); self.redraw()
def change_colour(self, val) : self.line.set_color(val); self.redraw()
def change_marker(self, val) : self.line.set_marker(val); self.redraw()
def change_linewidth(self, val) : self.line.set_linewidth(val); self.redraw()
def change_markersize(self, val) : self.line.set_markersize(val); self.redraw()
def change_markevery(self, val) : self.line.set_markevery(val); self.redraw()
def change_markerfacecolor(self, val) : self.line.set_markerfacecolor(val); self.redraw()
def change_alpha(self, val) : self.line.set_alpha(val); self.redraw()
def change_zorder(self, val) : self.line.set_zorder(val); self.redraw()
def change_visible(self, val) : self.line.set_visible(val); self.redraw()
def change_aa(self, val) : self.line.set_aa(val); self.redraw()
def change_linestyle(self, val) : self.line.set_linestyle(val); self.redraw()
def change_solid_capstyle(self, val) : self.line.set_solid_capstyle(val); self.redraw()
def change_solid_joinstyle(self, val) : self.line.set_solid_joinstyle(val); self.redraw()
def change_dash_capstyle(self, val) : self.line.set_dash_capstyle(val); self.redraw()
def change_dash_joinstyle(self, val) : self.line.set_dash_joinstyle(val); self.redraw()
#END PROPERTIES FUNCTIONS
def get_method_window(self) :
#frame = gtk.Frame(self.source)
hbox = gtk.HBox()
config_butt = gtk.Button(self.get_aname_nice()+'...')
config_butt.set_relief(gtk.RELIEF_NONE)
self.connect("aesthete-aname-nice-change", lambda o, a, v : self.set_label_for_button(config_butt,o))
self.connect("aesthete-aname-nice-change", lambda o, a, v : self.set_label_for_button(config_butt,o))
hbox.pack_start(config_butt)
remove_butt = gtk.Button(); remove_butt.add(gtk.image_new_from_stock(gtk.STOCK_CLEAR, gtk.ICON_SIZE_SMALL_TOOLBAR))
remove_butt.connect("clicked", lambda o : self.self_remove())
hbox.pack_start(remove_butt, False)
win = gtk.VBox()
nb = gtk.Notebook()
line_table_maker = PreferencesTableMaker()
line_table_maker.append_row("Label",
self.aes_method_entry_update("label"))
line_table_maker.append_row("Colour",
self.aes_method_colour_button("colour", "Set line colour"))
line_table_maker.append_row("Thickness",
self.aes_method_entry("linewidth",
wait_until_parsable_float=True))
line_table_maker.append_row("Alpha",
self.aes_method_entry_update("alpha"))
line_table_maker.append_row("Z order",
self.aes_method_entry_update("zorder"))
line_table_maker.append_row("Visible",
self.aes_method_toggle_button("visible", onoff=('On','Off')))
if self.source.needs_resolution :
line_table_maker.append_row("Resolution",
self.aes_method_entry_update("resolution", 'Set'))
line_table_maker.append_row("Antialiased",
self.aes_method_toggle_button("aa", onoff=('On','Off')))
nb.append_page(line_table_maker.make_table(), gtk.Label("General"))
marker_table_maker = PreferencesTableMaker()
marker_table_maker.append_row("Marker",
self.aes_method_entry_update("marker"))
marker_table_maker.append_row("Size",
self.aes_method_entry_update("markersize"))
marker_table_maker.append_row("Freq",
self.aes_method_entry_update("markevery"))
marker_table_maker.append_row("Colour",
self.aes_method_colour_button("markerfacecolor", "Set marker colour"))
nb.append_page(marker_table_maker.make_table(), gtk.Label("Marker"))
style_table_maker = PreferencesTableMaker()
style_table_maker.append_row("Line Style",
self.aes_method_entry_update("linestyle"))
style_table_maker.append_row("Solid Cap",
self.aes_method_entry_update("solid-capstyle"))
style_table_maker.append_row("Solid Join",
self.aes_method_entry_update("solid-joinstyle"))
style_table_maker.append_row("Dash Cap",
self.aes_method_entry_update("dash-capstyle"))
style_table_maker.append_row("Dash Join",
self.aes_method_entry_update("dash-joinstyle"))
nb.append_page(style_table_maker.make_table(), gtk.Label("Style"))
win.pack_start(nb, False)
if self.source is not None:
win.pack_start(gtk.HSeparator(), False)
update_hbox = gtk.HBox()
update_labl = gtk.Label()
update_labl.set_markup("from source <b>"+\
self.source.get_aname_nice()+\
"</b>")
update_labl.set_tooltip_text(self.source.get_aname_nice())
update_labl.set_alignment(1.0, 0.5)
update_hbox.pack_start(update_labl)
update_butt = gtk.Button()
update_butt.add(gtk.image_new_from_stock(gtk.STOCK_REFRESH,
gtk.ICON_SIZE_BUTTON))
update_butt.connect("clicked", lambda e : self.replot())
update_hbox.pack_start(update_butt, False)
win.pack_start(update_hbox, False)
self.connect("aesthete-property-change",
lambda o, p, v, a : self.set_label_for_button(config_butt))
win.show_all()
if self.env and self.env.action_panel :
win.hide()
win.aes_title = "Configure line"
config_butt.connect("clicked", lambda o : self.env.action_panel.to_action_panel(win))
else :
config_win = gtk.Window()
config_win.set_title("Configure line")
remove_butt = gtk.Button("Close"); remove_butt.connect("clicked", lambda o : config_win.hide())
win.pack_start(remove_butt)
config_win.add(win)
config_win.hide()
config_butt.connect("clicked", lambda o : config_win.show())
hbox.show_all()
#frame.add(win)
return hbox
class GlancerLine3D(GlancerLine) :
def __init__(self, plot, line = None, source = None, resolution=None, env=None,
read_series=False, axes = None, aname="GlancerLine3D",
time=None):
GlancerLine.__init__(self, plot, line=line, source=source,
resolution=resolution, env=env,
read_series=read_series, axes=axes, aname=aname, time=time)
def _line_from_source(self) :
if self.source is None :
return
x_range = self.axes.get_xlim() if self.source.needs_x_range else None
values = self.source.source_get_values(multi_array=True,
x_range=x_range,
time=self.get_time())[0]
points = values['values']
series = values['x']
dim = self.source.source_get_max_dim()
if dim <3 :
points = (points,)
trans = [series, points]
trans = (trans[0], trans[1][0], 0 if dim < 3 else trans[1][1])
self.line = self.axes.plot(*trans)[0]
# Can't replot as no set_zdata :(
self.axes.figure.canvas.draw()
class GlancerSurface(GlancerLineLike) :
line = None
plot = None
source = None
resolution = None
def get_useful_vars(self) :
return {
'line' : 'mpl Poly3DCollection',
}
def __init__(self, plot, line = None, source = None, resolution=None, env=None,
read_series=False, axes = None, aname="GlancerSurface",
time=None):
GlancerLineLike.__init__(self, plot, line=line, source=source,
resolution=resolution, env=env,
read_series=read_series, axes=axes, aname=aname, time=time)
#PROPERTIES
def get_aesthete_properties(self):
return { 'label' : [self.change_label, self.get_label, True],
'linewidth' : [self.change_linewidth, self.get_linewidth, True],
'source' : [None, self.get_source, True],
'alpha' : [self.change_alpha, self.get_alpha, True],
'zorder' : [self.change_zorder, self.get_zorder, True],
'visible' : [self.change_visible, self.get_visible, True],
'resolution' : [self.change_resolution, self.get_resolution, True],
'linestyle' : [self.change_linestyle, self.get_linestyle, True]}
#BEGIN PROPERTIES FUNCTIONS
def get_label(self, val=None): return self.line.get_label() if val==None else val
def get_marker(self, val=None): return self.line.get_marker() if val==None else val
def get_linewidth(self, val=None): return self.line.get_linewidth() if val==None else float(val)
def get_source(self, val=None) : return self.source if val==None else val
def get_alpha(self, val=None): return self.line.get_alpha() if val==None else float(val)
def get_zorder(self, val=None): return self.line.get_zorder() if val==None else float(val)
def get_visible(self, val=None): return self.line.get_visible() if val==None else (val=='True')
def get_linestyle(self, val=None): return self.line.get_linestyle() if val==None else val
def get_resolution(self, val=None): return self.resolution if val==None else int(val)
def change_label(self, val) : self.line.set_label(val); self.set_aname_nice(val); self.plot.do_legend(); self.redraw()
def change_linewidth(self, val) : self.line.set_linewidth(val); self.redraw()
def change_alpha(self, val) : self.line.set_alpha(val); self.redraw()
def change_zorder(self, val) : self.line.set_zorder(val); self.redraw()
def change_visible(self, val) : self.line.set_visible(val); self.redraw()
def change_linestyle(self, val) : self.line.set_linestyle(val); self.redraw()
def change_resolution(self, val) : self.resolution = val; self.replot()
#END PROPERTIES FUNCTIONS
def get_method_window(self) :
#frame = gtk.Frame(self.source)
hbox = gtk.HBox()
config_butt = gtk.Button(self.get_aname_nice()+'...')
config_butt.set_relief(gtk.RELIEF_NONE)
self.connect("aesthete-aname-nice-change", lambda o, a, v : self.set_label_for_button(config_butt,o))
self.connect("aesthete-aname-nice-change", lambda o, a, v : self.set_label_for_button(config_butt,o))
hbox.pack_start(config_butt)
remove_butt = gtk.Button(); remove_butt.add(gtk.image_new_from_stock(gtk.STOCK_CLEAR, gtk.ICON_SIZE_SMALL_TOOLBAR))
remove_butt.connect("clicked", lambda o : self.self_remove())
hbox.pack_start(remove_butt, False)
win = gtk.VBox()
nb = gtk.Notebook()
line_table_maker = PreferencesTableMaker()
line_table_maker.append_row("Label",
self.aes_method_entry_update("label", "Set"))
line_table_maker.append_row("Thickness",
self.aes_method_entry("linewidth",
wait_until_parsable_float=True))
line_table_maker.append_row("Alpha",
self.aes_method_entry_update("alpha", "Set"))
line_table_maker.append_row("Z order",
self.aes_method_entry_update("zorder", "Set"))
line_table_maker.append_row("Visible",
self.aes_method_toggle_button("visible", onoff=('On','Off')))
if self.source.needs_resolution :
line_table_maker.append_row("Resolution",
self.aes_method_entry_update("resolution", 'Set'))
nb.append_page(line_table_maker.make_table(), gtk.Label("General"))
style_table_maker = PreferencesTableMaker()
style_table_maker.append_row("Line Style",
self.aes_method_entry_update("linestyle", "Set"))
nb.append_page(style_table_maker.make_table(), gtk.Label("Style"))
win.pack_start(nb, False)
if self.source is not None:
win.pack_start(gtk.HSeparator(), False)
update_hbox = gtk.HBox()
update_labl = gtk.Label()
update_labl.set_markup("from source <b>"+\
self.source.get_aname_nice()+\
"</b>")
update_labl.set_alignment(1.0, 0.5)
update_hbox.pack_start(update_labl)
update_butt = gtk.Button()
update_butt.add(gtk.image_new_from_stock(gtk.STOCK_REFRESH,
gtk.ICON_SIZE_BUTTON))
update_butt.connect("clicked", lambda e : self.replot())
update_hbox.pack_start(update_butt, False)
win.pack_start(update_hbox, False)
self.connect("aesthete-property-change",
lambda o, p, v, a : self.set_label_for_button(config_butt))
win.show_all()
if self.env and self.env.action_panel :
win.hide()
win.aes_title = "Configure line"
config_butt.connect("clicked", lambda o : self.env.action_panel.to_action_panel(win))
else :
config_win = gtk.Window()
config_win.set_title("Configure line")
remove_butt = gtk.Button("Close"); remove_butt.connect("clicked", lambda o : config_win.hide())
win.pack_start(remove_butt)
config_win.add(win)
config_win.hide()
config_butt.connect("clicked", lambda o : config_win.show())
hbox.show_all()
#frame.add(win)
return hbox
def _line_from_source(self) :
if self.source is None :
return
x_range = self.axes.get_xlim() if self.source.needs_x_range else None
y_range = self.axes.get_ylim()
values = self.source.source_get_values(multi_array=True,
x_range=x_range,
y_range=y_range,
time=self.get_time(),
resolution=self.get_resolution())[0]
points = values['values']
xs = values['x']
ys = values['y']
self.line = self.axes.plot_surface(xs, ys, points, rstride=1, cstride=1,
cmap=cm.jet)
# Can't replot as no set_zdata :(
        self.axes.figure.canvas.draw()

# --- aesthete/glancer/Glancer.py ---
import os, math, sys, getopt, string
import pango
import random
from gtk import gdk
from ..tablemaker import PreferencesTableMaker
from aobject.paths import get_user_home
import threading
import cairo, gtk, gobject
import matplotlib
import numpy, numpy.fft
import scipy, scipy.interpolate, scipy.optimize
from matplotlib.backends.backend_cairo import RendererCairo
import pylab
from PIL import Image
from aobject.aobject import *
from Pie import GlancerPie
from Plot import GlancerPlot
from Plot3d import GlancerPlot3D
def add_icons() :
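    # Register the glancer toolbar icons (2D plot, pie, 3D plot) as GTK stock
    # ids of the form 'aes-glancer-<name>', loaded from the shared SVG files.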
icon_factory = gtk.IconFactory()
icon_names = ('plot2d', 'pie', 'plot3d')
for icon_name in icon_names :
stock_id = 'aes-glancer-'+icon_name
source = gtk.IconSource()
source.set_filename(
paths.get_share_location() + 'images/icons/glancer/' + icon_name + '.svg')
icon_set = gtk.IconSet()
icon_set.add_source(source)
icon_factory.add(stock_id, icon_set)
icon_factory.add_default()
class GlancerPlotLayout(gtk.Layout) :
__gsignals__ = { "expose-event" : "override" }
resize_conn = None
    # Privileged child
child = None
def __init__(self) :
gtk.Layout.__init__(self)
self.connect('size-allocate', self.do_child_resize_event)
#widget.connect('size-allocate', lambda w, a : self.frame.set(.5,.5))
def add(self, child) :
gtk.Layout.add(self, child)
self.child = child
child.set_size_request(self.allocation.width, self.allocation.height)
self.resize_conn = child.connect('size-allocate', self.do_child_resize_event)
self.do_child_resize_event(child, None)
def do_child_resize_event(self, child, event) :
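        # Keep the privileged child (the mpl canvas) fitting inside this
        # layout: scale its DPI and size request down by the overflow ratio,
        # then centre it within the allocation.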
child = self.child
if child is None :
return
w = child.allocation.width
h = child.allocation.height
if w > 1 and h > 1 :
wrat = w/float(self.allocation.width)
hrat = h/float(self.allocation.height)
rat = max(wrat, hrat)
if abs(rat-1.) > 1e-5 and rat > 0 :
child.figure.set_dpi(child.figure.get_dpi()/rat)
child.set_size_request(int(w/rat),
int(h/rat))
child.queue_draw()
al = self.allocation
x = max(int(.5*(al.width-w)), 0)
y = max(int(.5*(al.height-h)), 0)
if x != child.allocation.x or y != child.allocation.y :
self.move(child, x, y)
    def remove(self, child) :
        # resize_conn is a GTK signal handler id, so disconnect it as one
        child.disconnect(self.resize_conn)
        gtk.Layout.remove(self, child)
def do_expose_event(self, event) :
ret = gtk.Layout.do_expose_event(self, event)
cr = self.get_bin_window().cairo_create()
cr.set_source_rgb(1.,1.,.8)
cr.paint()
if self.child is not None :
al = self.child.allocation
cr.rectangle(al.x-5, al.y-5, al.width+10, al.height+10)
cr.set_line_width(5.)
cr.set_source_rgb(1.,.9,.8)
cr.stroke()
return ret
class Glancer(gtk.Frame, AObject) :
__gsignals__ = { "expose-event" : "override", "loaded-sim" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ( gobject.TYPE_STRING,)),
}
fig = None
read_series = True
def savefig(self) :
'''Save image of figure to file.'''
chooser = gtk.FileChooserDialog(\
title="Save Image", action=gtk.FILE_CHOOSER_ACTION_SAVE,
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK))
chooser.set_current_folder(get_user_home())
chooser.set_default_response(gtk.RESPONSE_OK)
chooser.set_current_name('.png')
resp = chooser.run()
self.grab_focus()
if resp == gtk.RESPONSE_OK :
filename = chooser.get_filename()
chooser.destroy()
else :
chooser.destroy()
return
self.fig.savefig(filename, format='png')
def set_ui(self) :
self.ui_action_group = gtk.ActionGroup('GlancerActions')
self.ui_action_group.add_actions([('GlancerMenu', None, 'Glancer'),
('GlancerSaveImage', None, 'Save image', None, None,
lambda w : self.savefig()),
])
self.ui_ui_string = '''
<ui>
<menubar name="MenuBar">
<menu action="GlancerMenu">
<menuitem action="GlancerSaveImage"/>
</menu>
</menubar>
</ui>
'''
def __init__(self, env=None):
gtk.Frame.__init__(self)
vbox = gtk.VBox()
self.fig = matplotlib.pyplot.figure()
self.plotter = None
self.frame = GlancerPlotLayout()
vbox.pack_start(self.frame)
self.time_hbox = gtk.HBox()
self.time_hbox.pack_start(gtk.Label('Time (as tuple) : '), False)
vbox.pack_start(self.time_hbox, False)
vbox.show_all()
self.time_hbox.hide()
self.add(vbox)
self.set_size_request(0, 200)
self.source_action = lambda s :\
self.load_series(get_object_from_dictionary(s))
AObject.__init__(self, "Glancer", env, view_object = True)
self.set_aname_nice("Plot" + (" ("+str(self.get_aname_num())+")" if self.get_aname_num()>1 else ""))
def load_series(self, source, series = None, vals = None):
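        # Lazily create a plotter on first use: a 3D plot for 3-dimensional
        # sources, otherwise the standard 2D plot, then hand the series over.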
if self.plotter is None :
dims = source.source_get_max_dim()
if dims == 3:
self.new_plotter("3D Plot", GlancerPlot3D)
else :
self.new_plotter("2D Plot", GlancerPlot)
self.plotter.load_series(source, series=series, vals=vals)
self.elevate()
self.queue_draw()
def aes_add_a(self, aname_root, **parameters) :
if aname_root in ('GlancerLine', 'GlancerLegend') :
return self.plotter.aes_add_a(aname_root, **parameters)
if aname_root == 'GlancerPlot' :
self.new_plotter("2D Plot", GlancerPlot)
return self.plotter
return AObject.aes_add_a(self, aname_root, **parameters)
def new_plotter(self, title, plotter) :
self.check_clear()
if self.plotter is not None :
self.frame.remove(self.frame.get_child())
self.time_hbox.remove(self.time_hbox.get_children()[1])
self.plotter.aes_remove()
del self.plotter
self.fig.clear()
self.plotter = plotter(self.fig, queue_draw=lambda : self.queue_draw(), env=self.get_aenv())
self.absorb_properties(self.plotter, as_self=False)
self.frame.add(self.plotter.canvas)
self.frame.show_all()
self.time_hbox.pack_start(self.plotter.time_entr)
global object_dictionary; object_dictionary.set_show(self, True)
self.set_aname_nice(title)
self.queue_draw()
def do_expose_event(self, event):
ret = gtk.Frame.do_expose_event(self, event)
cr = self.window.cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.draw(cr, *self.window.get_size())
return ret
def print_out(self, op, pc, pn) :
w = pc.get_width(); h = pc.get_height()
w1, h1 = self.plotter.canvas.get_width_height()
        r = 1.
        # compare aspect ratios in float arithmetic (the canvas sizes are ints)
        if float(w)/h > float(w1)/h1 : w = h*float(w1)/h1; r = float(h)/h1
        else : h = w*float(h1)/w1; r = float(w)/w1
#op.cancel()
c = pc.get_cairo_context()
c.scale(r, r)
renderer = RendererCairo (self.plotter.canvas.figure.dpi)
renderer.set_width_height (w1, h1)
renderer.gc.ctx = pc.get_cairo_context()
self.plotter.canvas.figure.draw (renderer)
def draw(self, cr, swidth, sheight):
if self.plotter is not None :
self.plotter.canvas.draw()
#PROPERTIES
def get_auto_aesthete_properties(self):
return { }
#BEGIN PROPERTIES FUNCTIONS
#END PROPERTIES FUNCTIONS
def replot_all(self) :
for line in self.lines :
line.replot()
def get_method_window(self) :
win = gtk.VBox()
icon_table = gtk.Table(3, 1)
win.pack_start(icon_table)
new_butt = gtk.ToggleButton()
new_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_NEW,
gtk.ICON_SIZE_BUTTON))
new_butt.set_tooltip_text("New plot")
icon_table.attach(new_butt, 0, 1, 0, 1)
new_menu = gtk.Menu()
new_menu.attach_to_widget(new_butt, None)
new_butt.connect("button_press_event", lambda w, e :\
new_menu.popup(None, None, None, e.button, e.time) \
if not w.get_active() else \
new_menu.popdown())
new_menu.set_title('New Plot')
new_menu.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(65535, 65535, 65535))
new_labl = gtk.Label()
new_labl.set_markup('<b>New...</b>')
new_meni = gtk.MenuItem()
new_meni.add(new_labl)
new_menu.append(new_meni)
plot_butt = gtk.MenuItem()
plot_butt.add(gtk.image_new_from_stock('aes-glancer-plot2d',
gtk.ICON_SIZE_BUTTON))
plot_butt.set_tooltip_text("New 2D plot")
plot_butt.connect("activate", lambda o : self.new_plotter("2D Plot", GlancerPlot))
new_menu.append(plot_butt)
pie_butt = gtk.MenuItem()
pie_butt.add(gtk.image_new_from_stock('aes-glancer-pie',
gtk.ICON_SIZE_BUTTON))
pie_butt.set_tooltip_text("New pie chart")
pie_butt.connect("activate", lambda o : self.new_plotter("Pie Chart", GlancerPie))
new_menu.append(pie_butt)
plot_butt = gtk.MenuItem()
plot_butt.add(gtk.image_new_from_stock('aes-glancer-plot3d',
gtk.ICON_SIZE_BUTTON))
plot_butt.set_tooltip_text("New 3D plot")
plot_butt.connect("activate", lambda o : self.new_plotter("3D Plot", GlancerPlot3D))
new_menu.append(plot_butt)
new_menu.show_all()
sim_butt = gtk.Button()
sim_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_INDEX,
gtk.ICON_SIZE_BUTTON))
sim_butt.set_tooltip_text("Plot from currently selected Source")
sim_butt.set_sensitive(get_object_dictionary().selected_source \
is not None)
icon_table.attach(sim_butt, 1, 2, 0, 1)
sim_butt.connect("clicked", lambda o : self.load_from_sim(\
get_object_dictionary().selected_source))
get_object_dictionary().connect(\
'aesthete-selected-source-change',
lambda tr : sim_butt.set_sensitive(True))
win.set_border_width(5)
win.show_all()
return win
def check_clear(self, force = False) :
if self.plotter is not None :
self.plotter.check_clear(force=force)
def load_from_sim(self, aname) :
if aname == None or aname == 'None' or aname == '' : return
sim = get_object_from_dictionary(aname)
self.check_clear()
        dim = sim.source_get_max_dim()
        # 'values' was not defined here in the original; fetching the source's
        # value sets (as the line/scatter loaders do) is the assumed intent.
        values = sim.source_get_values(multi_array=True)
        if len(values) == 1 :
self.load_series(sim)
else :
for point_set in values :
points = point_set['values']
series = range(0, len(points[0]))
if dim > 1 :
trans = zip(*points)
if not self.read_series : trans[0] = series
else :
trans = [series, points]
self.load_series(None, trans[0], trans[1])
add_icons()
aname_root_catalog['Glancer'] = Glancer

# --- aesthete/glancer/Plot3d.py ---
import os, math, sys, getopt, string
from aobject.utils import debug_print
import pango
import random
from gtk import gdk
from ..tablemaker import PreferencesTableMaker
import threading
import cairo, gtk, gobject
import matplotlib
import numpy, numpy.fft
import scipy, scipy.interpolate, scipy.optimize
from matplotlib.backends.backend_cairo import RendererCairo
from matplotlib.backends.backend_gtkcairo import FigureCanvasGTKCairo as mpl_Canvas
from matplotlib.backends.backend_gtkcairo import NavigationToolbar2Cairo as mpl_Navbar
from mpl_toolkits.mplot3d import Axes3D
import pylab
from PIL import Image
from aobject.aobject import *
from Canvas import *
from Line import GlancerLine, GlancerLine3D, GlancerSurface
from Legend import GlancerLegend
from Plot import GlancerPlotLike
legend_locs = matplotlib.legend.Legend.codes
legend_locs_rev = dict((legend_locs[k],k) for k in legend_locs)
class GlancerPlot3D(GlancerPlotLike) :
max_dim = 3
def do_mpl_scroll_event(self, event) :
'''Handle scrolling ourselves.'''
if event.inaxes != self.axes :
return False
self.axes.set_autoscale_on(False)
xl = self.axes.get_xlim()
yl = self.axes.get_ylim()
ec = (event.xdata, event.ydata)
# event.step tells direction
spd = (1+self.scroll_speed) ** (-event.step)
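        # e.g. with the default scroll_speed of 0.1, one notch up (step=+1)
        # gives spd ~ 0.91 (zoom in ~10%) and one notch down gives spd = 1.1;
        # the limits below are rescaled about the cursor point, which stays put.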
# unfortunately, this seems to be the only sensible way to
# get to the modifiers. Phrased oddly, but says do_x if we're
# not told to only do y, and v.v.
do_specific = event.guiEvent.state & gtk.gdk.CONTROL_MASK
do_x = not (do_specific and (event.guiEvent.state & gtk.gdk.SHIFT_MASK))
do_y = not (do_specific and do_x)
if do_x :
self.axes.set_xlim(ec[0] - (ec[0]-xl[0])*spd,
ec[0] - (ec[0]-xl[1])*spd)
if do_y :
self.axes.set_ylim(ec[1] - (ec[1]-yl[0])*spd,
ec[1] - (ec[1]-yl[1])*spd)
self.queue_draw()
return True
_move_from = None
_move_from_xl = None
_move_from_yl = None
def do_mpl_button_press_event(self, event) :
'''Check button presses.'''
if event.inaxes != self.axes :
return False
m_control = event.guiEvent.state & gtk.gdk.CONTROL_MASK
if event.button == 2 :
if m_control :
self.axes.autoscale_view()
self.axes.set_autoscale_on(True)
self.queue_draw()
else :
self.axes.set_autoscale_on(False)
self._move_from = (event.x, event.y)
self._move_from_xl = self.axes.get_xlim()
self._move_from_yl = self.axes.get_ylim()
self.queue_draw()
return True
return False
def do_mpl_button_release_event(self, event) :
'''Check button releases.'''
if event.button == 2 :
self._move_from = None
self._move_from_xl = None
self._move_from_yl = None
self.queue_draw()
return True
return False
def do_mpl_motion_notify_event(self, event) :
'''Check motion notifications.'''
if event.inaxes != self.axes :
return False
do_specific = event.guiEvent.state & gtk.gdk.CONTROL_MASK
do_x = not (do_specific and (event.guiEvent.state & gtk.gdk.SHIFT_MASK))
do_y = not (do_specific and do_x)
if self._move_from is not None :
dx = (event.x-self._move_from[0])
dy = (event.y-self._move_from[1])
l,b,r,t = self.axes.bbox.extents
el,er = self.axes.get_xlim()
eb,et = self.axes.get_ylim()
dx = dx*(er-el)/(r-l)
dy = dy*(et-eb)/(t-b)
if do_x :
self.axes.set_xlim(self._move_from_xl[0]-dx,
self._move_from_xl[1]-dx)
if do_y :
self.axes.set_ylim(self._move_from_yl[0]-dy,
self._move_from_yl[1]-dy)
self.queue_draw()
return True
def __init__(self, fig, queue_draw, env=None):
self.axes = Axes3D(fig)
GlancerPlotLike.__init__(self, "GlancerPlot", fig, queue_draw, env=env)
def load_series(self, source, series = None, vals = None):
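        # Dispatch on the source: two domain columns give a surface plot,
        # otherwise a 3D line; raw series/vals fall back to a plain 2D line.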
resolution = self.get_resolution()
if source is None :
mpl_line, = self.axes.plot(series, vals)
line = GlancerLine(self, line=mpl_line, env=self.get_aenv(),
time=self.get_time())
elif len(source.get_domain_cols()) == 2 :
line = GlancerSurface(self, source=source, axes=self.axes,
read_series=self.read_series,
resolution=resolution,
env=self.get_aenv(), time=self.get_time())
else :
line = GlancerLine3D(self, source=source, axes=self.axes,
read_series=self.read_series,
resolution=resolution,
env=self.get_aenv(), time=self.get_time())
self.lines.append(line)
self.absorb_properties(line, as_self = False)
line.change_property("label", source.get_aname_nice())
self.do_legend()
self.update_canvas_size()
self.time_entr.get_parent().set_visible(source.get_time_cols() is not None)
return line
def get_auto_aesthete_properties(self) :
return {'time' : (string_to_float_tup, (AOBJECT_CAN_NONE,)),
'time_args' : (int, (AOBJECT_CAN_NONE,)),
}
#PROPERTIES
def get_aesthete_properties(self):
return { 'plot_over' : [self.change_plot_over, self.get_plot_over, True],
'legend' : [self.change_legend, self.get_legend, True],
'figure_facecolor' : [self.change_figure_facecolor, self.get_figure_facecolor, True],
'axes_axis_bgcolor' : [self.change_axes_axis_bgcolor, self.get_axes_axis_bgcolor, True],
'axes_xlabel' : [self.change_axes_xlabel, self.get_axes_xlabel, True],
'axes_ylabel' : [self.change_axes_ylabel, self.get_axes_ylabel, True],
'title_font' : [self.change_title_font, self.get_title_font, True],
'xmultiplier' : [self.change_xmultiplier, self.get_xmultiplier, True],
'ymultiplier' : [self.change_ymultiplier, self.get_ymultiplier, True],
'read_series' : [self.change_read_series, self.get_read_series, True],
'legend_loc' : [self.change_legend_loc, self.get_legend_loc, True],
'title' : [self.change_title, self.get_title, True],
'resolution' : [self.change_resolution, self.get_resolution, True] }
#BEGIN PROPERTIES FUNCTIONS
def get_resolution(self, val=None) :
if val != None :
return int(val)
if self.lines == [] :
return 10
return self.lines[0].get_resolution()
def change_resolution(self, val) :
for line in self.lines :
line.set_resolution(val)
self.queue_draw()
def get_xmultiplier(self, val=None) : return 1. if val==None else float(val)
def get_ymultiplier(self, val=None) : return 1. if val==None else float(val)
def get_plot_over(self, val=None): return self.plot_over if val==None else (val=='True')
def get_legend(self, val=None): return self.legend if val==None else (val=='True')
def get_read_series(self, val=None): return self.read_series if val==None else (val=='True')
def get_title(self, val=None): return self.title if val==None else val
def get_legend_loc(self, val=None) :
return (legend_locs_rev[self.legend_object.get_loc()] if self.legend_object else '')\
if val==None else val
def get_axes_axis_bgcolor(self, val=None):
return mpl_to_tuple(self.axes.get_axis_bgcolor()) \
if val==None else string_to_float_tup(val)
def get_figure_facecolor(self, val=None):
return mpl_to_tuple(self.fig.get_facecolor()) \
if val==None else string_to_float_tup(val)
def get_axes_xlabel(self, val=None) : return self.axes.get_xlabel() if val==None else val
def get_axes_ylabel(self, val=None) : return self.axes.get_ylabel() if val==None else val
def get_xhide_oom(self, val=None) :
return False \
if val==None else (val=='True')
def get_yhide_oom(self, val=None) :
return False \
if val==None else (val=='True')
def get_title_font(self, val=None) :
label_props = self.axes.title.get_fontproperties()
return mpl_to_font(label_props) \
if val==None else val
def change_legend_loc(self, val) : self.do_legend(loc = val)
def change_plot_over(self, val): self.plot_over = val
def change_title(self, val) :
self.title = val
self.axes.set_title(self.title, visible = (self.title!=''))
self.queue_draw()
def change_title_font(self, val) :
label_props = self.axes.title.get_fontproperties()
font_to_mpl(label_props, val)
def change_read_series(self, val) : self.read_series = val
def change_xmultiplier(self, val=None) : self.axes.xaxis.get_major_formatter().multiplier = val; self.queue_draw()
def change_ymultiplier(self, val=None) : self.axes.yaxis.get_major_formatter().multiplier = val; self.queue_draw()
def change_legend(self, val) : self.legend = val; self.do_legend()
def change_axes_axis_bgcolor(self, val) : self.axes.set_axis_bgcolor(val); self.queue_draw()
def change_axes_xlabel(self, val) : self.axes.set_xlabel(val); self.queue_draw()
def change_axes_ylabel(self, val) : self.axes.set_ylabel(val); self.queue_draw()
def change_figure_facecolor(self, val) : self.fig.set_facecolor(val); self.queue_draw()
#END PROPERTIES FUNCTIONS
def get_method_window(self) :
#fram = gtk.Frame()
#fram.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(1, 1, 1))
win = gtk.VBox()
who_algn = gtk.Alignment(0.5, 0.5)
who_algn.set_property("top_padding", 10)
who_hbox = gtk.HBox(spacing=5)
who_hbox.pack_start(gtk.image_new_from_stock('aes-glancer-plot3d',
gtk.ICON_SIZE_BUTTON),
False)
who_hbox.pack_start(gtk.Label("3D Plot"), False)
who_algn.add(who_hbox)
win.pack_start(who_algn)
icon_table = gtk.Table(1, 5)
win.pack_start(icon_table)
# Visual Config
config_butt = gtk.Button()
config_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_PAGE_SETUP,
gtk.ICON_SIZE_BUTTON))
config_butt.set_tooltip_text("Appearance preferences...")
icon_table.attach(config_butt, 0, 1, 0, 1)
config_win = gtk.Window(); config_win.set_size_request(400, -1)
config_win.set_title("Configure plot appearance")
config_vbox = self.methods_make_visual_config()
config_win.add(config_vbox); config_win.set_transient_for(self.get_aenv().toplevel)
config_butt.connect("clicked", lambda o : config_win.show())
config_remove_butt = gtk.Button("Close")
config_remove_butt.connect("clicked", lambda o : config_win.hide())
config_remove_butt.show_all()
config_hbox = gtk.HBox(); config_hbox.show()
config_hbox.pack_start(config_remove_butt, False, False, 5)
config_vbox.pack_end(config_hbox, False, False, 5)
# Import Config
legend_amtb = self.aes_method_toggle_button("legend", None,
preferencable=False)
legend_amtb.set_image(gtk.image_new_from_stock(gtk.STOCK_JUSTIFY_RIGHT,
gtk.ICON_SIZE_BUTTON))
legend_amtb.set_tooltip_text("Toggle legend")
icon_table.attach(legend_amtb, 1, 2, 0, 1)
plot_over_amtb = self.aes_method_toggle_button("plot_over", None,
preferencable=False)
plot_over_amtb.set_image(gtk.image_new_from_stock(gtk.STOCK_DND_MULTIPLE,
gtk.ICON_SIZE_BUTTON))
plot_over_amtb.set_tooltip_text("Toggle overlay of new plots")
icon_table.attach(plot_over_amtb, 2, 3, 0, 1)
# From Sim
sim_hbox = gtk.HBox()
#sim_cmbo = gtk.ComboBox( get_object_dictionary().get_liststore_by_am('Source') )
#sim_cllr = gtk.CellRendererText(); sim_cmbo.pack_start(sim_cllr); sim_cllr.props.ellipsize = pango.ELLIPSIZE_END;
#sim_cmbo.add_attribute(sim_cllr, 'text', 1)
#self.sim_cmbo = sim_cmbo
#sim_hbox.pack_start(sim_cmbo)
clear_butt = gtk.Button()
clear_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_CLEAR,
gtk.ICON_SIZE_BUTTON))
clear_butt.set_tooltip_text("Clear all lines")
icon_table.attach(clear_butt, 0, 1, 1, 2)
clear_butt.connect("clicked", lambda o : self.check_clear(force=True))
replot_butt = gtk.Button()
replot_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_REFRESH,
gtk.ICON_SIZE_BUTTON))
replot_butt.set_tooltip_text("Replot all lines")
replot_butt.connect("clicked", lambda o : self.replot_all())
icon_table.attach(replot_butt, 1, 2, 1, 2)
#fram.add(win)
win.show_all()
return win
def methods_make_visual_config(self) :
config_vbox = gtk.VBox()
config_ntbk = gtk.Notebook()
general_table_maker = PreferencesTableMaker()
general_table_maker.append_heading("Title")
general_table_maker.append_row("Title", self.aes_method_entry("title"))
general_table_maker.append_row("Title Font", self.aes_method_font_button("title_font", "Set title font"))
general_table_maker.append_heading("Colours")
general_table_maker.append_row("Face Colour", self.aes_method_colour_button("figure_facecolor", "Set figure colour"))
general_table_maker.append_row("Axes Background",self.aes_method_colour_button("axes_axis_bgcolor", "Axes Background Colour"))
config_tabl = general_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False)
config_ntbk.append_page(config_tabl_vbox, gtk.Label("General"))
legend_table_maker = PreferencesTableMaker()
legend_table_maker.append_heading("Geometry")
legend_position_cmbo = gtk.combo_box_new_text()
for loc in legend_locs : legend_position_cmbo.append_text(loc)
self.aes_method_automate_combo_text(legend_position_cmbo, "legend_loc")
legend_table_maker.append_row("Position", legend_position_cmbo)
config_tabl = legend_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False)
config_ntbk.append_page(config_tabl_vbox, gtk.Label("Legend"))
axes = { 'x' : "X" , 'y' : "Y" }
for axis in axes :
axes_table_maker = PreferencesTableMaker()
axes_table_maker.append_heading("Labeling")
axes_table_maker.append_row(axes[axis]+" Axes Label", self.aes_method_entry("axes_"+axis+"label"))
config_tabl = axes_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False);
config_ntbk.append_page(config_tabl_vbox, gtk.Label(axes[axis]+" Axis"))
config_vbox.pack_start(config_ntbk)
config_vbox.show_all()
        return config_vbox

# --- aesthete/glancer/Pie.py ---
import os, math, sys, getopt, string
import pango
import random
from gtk import gdk
from ..tablemaker import PreferencesTableMaker
import threading
import cairo, gtk, gobject
import matplotlib
import numpy, numpy.fft
import scipy, scipy.interpolate, scipy.optimize
from matplotlib.backends.backend_cairo import RendererCairo
from matplotlib.backends.backend_gtkcairo import FigureCanvasGTKCairo as mpl_Canvas
from matplotlib.backends.backend_gtkcairo import NavigationToolbar2Cairo as mpl_Navbar
import pylab
from PIL import Image
from Canvas import *
from Legend import GlancerLegend
from aobject.aobject import *
legend_locs = matplotlib.legend.Legend.codes
legend_locs_rev = dict((legend_locs[k],k) for k in legend_locs)
class GlancerPie(AObject) :
fig = None
axes = None
canvas = None
pie = None
source = None
legend_object = None
legend = True
title = ''
read_series = True
scroll_speed = 0.1
def replot(self) :
self._pie_from_source()
def _pie_from_source(self) :
if self.source is None :
return
x_range = self.axes.get_xlim() if self.source.needs_x_range else None
values = self.source.source_get_values(multi_array=True, x_range=x_range)[0]
points = values['values']
series = range(0, len(points[0]))
dim = self.source.source_get_max_dim()
if dim > 1 :
trans = zip(*points)
if not self.read_series :
trans[0] = series
else :
trans = [series, points]
        if self.pie is not None :
            # wedge patches cannot be updated in place, so clear and rebuild
            self.axes.clear()
        self.pie = self.axes.pie(trans[1])[0]
        self.axes.figure.canvas.draw()
def do_mpl_scroll_event(self, event) :
'''Handle scrolling ourselves.'''
if event.inaxes != self.axes :
return False
self.axes.set_autoscale_on(False)
xl = self.axes.get_xlim()
yl = self.axes.get_ylim()
ec = (event.xdata, event.ydata)
# event.step tells direction
spd = (1+self.scroll_speed) ** (-event.step)
# unfortunately, this seems to be the only sensible way to
# get to the modifiers. Phrased oddly, but says do_x if we're
# not told to only do y, and v.v.
do_specific = event.guiEvent.state & gtk.gdk.CONTROL_MASK
do_x = not (do_specific and (event.guiEvent.state & gtk.gdk.SHIFT_MASK))
do_y = not (do_specific and do_x)
if do_x :
self.axes.set_xlim(ec[0] - (ec[0]-xl[0])*spd,
ec[0] - (ec[0]-xl[1])*spd)
if do_y :
self.axes.set_ylim(ec[1] - (ec[1]-yl[0])*spd,
ec[1] - (ec[1]-yl[1])*spd)
self.queue_draw()
return True
_move_from = None
_move_from_xl = None
_move_from_yl = None
def do_mpl_button_press_event(self, event) :
'''Check button presses.'''
if event.inaxes != self.axes :
return False
m_control = event.guiEvent.state & gtk.gdk.CONTROL_MASK
if event.button == 2 :
if m_control :
self.axes.autoscale_view()
self.axes.set_autoscale_on(True)
self.queue_draw()
else :
self.axes.set_autoscale_on(False)
self._move_from = (event.x, event.y)
self._move_from_xl = self.axes.get_xlim()
self._move_from_yl = self.axes.get_ylim()
self.queue_draw()
return True
return False
def do_mpl_button_release_event(self, event) :
'''Check button releases.'''
if event.button == 2 :
self._move_from = None
self._move_from_xl = None
self._move_from_yl = None
self.queue_draw()
return True
return False
def do_mpl_motion_notify_event(self, event) :
'''Check motion notifications.'''
if event.inaxes != self.axes :
return False
do_specific = event.guiEvent.state & gtk.gdk.CONTROL_MASK
do_x = not (do_specific and (event.guiEvent.state & gtk.gdk.SHIFT_MASK))
do_y = not (do_specific and do_x)
if self._move_from is not None :
dx = (event.x-self._move_from[0])
dy = (event.y-self._move_from[1])
l,b,r,t = self.axes.bbox.extents
el,er = self.axes.get_xlim()
eb,et = self.axes.get_ylim()
dx = dx*(er-el)/(r-l)
dy = dy*(et-eb)/(t-b)
if do_x :
self.axes.set_xlim(self._move_from_xl[0]-dx,
self._move_from_xl[1]-dx)
if do_y :
self.axes.set_ylim(self._move_from_yl[0]-dy,
self._move_from_yl[1]-dy)
self.queue_draw()
return True
def __init__(self, fig, queue_draw, env=None):
self.queue_draw = queue_draw
self.fig = fig
self.canvas = GlancerCanvas(self.fig)
self.axes = self.fig.add_subplot(1,1,1)
self.canvas.mpl_connect('scroll_event', self.do_mpl_scroll_event)
self.canvas.mpl_connect('button_press_event', self.do_mpl_button_press_event)
self.canvas.mpl_connect('button_release_event',
self.do_mpl_button_release_event)
self.canvas.mpl_connect('motion_notify_event',
self.do_mpl_motion_notify_event)
AObject.__init__(self, "GlancerPie", env, view_object=False)
def load_series(self, source, series=None, vals=None):
        if series is not None :
            raise RuntimeError(
                'Sorry, GlancerPie can only plot single series Sources')
self.source = source
self._pie_from_source()
    def redraw(self) :
        self.do_legend()
        self.fig.canvas.draw()
    def __del__(self) :
        # only disconnect the selected-source handler if it was ever connected
        if hasattr(self, 'sd_chg_conn') :
            get_object_dictionary().disconnect(self.sd_chg_conn)
        AObject.__del__(self)
def do_legend(self, loc = None) :
        # a pie chart has no line list; only add a legend once a pie is drawn
        if self.pie is not None and self.legend :
self.axes.legend(loc=loc)
if self.legend_object is not None :
self.legend_object.aes_remove()
self.legend_object = None
self.legend_object = GlancerLegend(self.axes.legend_, env = self.get_aenv())
self.absorb_properties(self.legend_object, as_self = False)
if loc==None : self.emit_property_change("legend_loc")
else : self.axes.legend_ = None
self.canvas.draw()
def check_legend(self) :
if self.legend_object :
self.legend_object.aes_remove()
self.legend_object = None
def check_clear(self, force = False) :
self.axes.clear()
self.check_legend()
self.queue_draw()
#PROPERTIES
def get_aesthete_properties(self):
return {
'source' : [None, self.get_source, True],
'legend' : [self.change_legend, self.get_legend, True],
'figure_facecolor' : [self.change_figure_facecolor, self.get_figure_facecolor, True],
'axes_axis_bgcolor' : [self.change_axes_axis_bgcolor, self.get_axes_axis_bgcolor, True],
'axes_xlabel' : [self.change_axes_xlabel, self.get_axes_xlabel, True],
'axes_ylabel' : [self.change_axes_ylabel, self.get_axes_ylabel, True],
'title_font' : [self.change_title_font, self.get_title_font, True],
'xlabel_font' : [self.change_xlabel_font, self.get_xlabel_font, True],
'ylabel_font' : [self.change_ylabel_font, self.get_ylabel_font, True],
'xhide_oom' : [self.change_xhide_oom, self.get_xhide_oom, True],
'yhide_oom' : [self.change_yhide_oom, self.get_yhide_oom, True],
'xtick_font' : [self.change_xtick_font, self.get_xtick_font, True],
'ytick_font' : [self.change_ytick_font, self.get_ytick_font, True],
'xmultiplier' : [self.change_xmultiplier, self.get_xmultiplier, True],
'ymultiplier' : [self.change_ymultiplier, self.get_ymultiplier, True],
'read_series' : [self.change_read_series, self.get_read_series, True],
'legend_loc' : [self.change_legend_loc, self.get_legend_loc, True],
'title' : [self.change_title, self.get_title, True] }
#BEGIN PROPERTIES FUNCTIONS
def get_source(self, val=None) : return self.source if val==None else val
def get_xmultiplier(self, val=None) : return 1. if val==None else float(val)
def get_ymultiplier(self, val=None) : return 1. if val==None else float(val)
def get_legend(self, val=None): return self.legend if val==None else (val=='True')
def get_read_series(self, val=None): return self.read_series if val==None else (val=='True')
def get_title(self, val=None): return self.title if val==None else val
def get_legend_loc(self, val=None) :
return (legend_locs_rev[self.legend_object.get_loc()] if self.legend_object else '')\
if val==None else val
def get_axes_axis_bgcolor(self, val=None):
return mpl_to_tuple(self.axes.get_axis_bgcolor()) \
if val==None else string_to_float_tup(val)
def get_figure_facecolor(self, val=None):
return mpl_to_tuple(self.fig.get_facecolor()) \
if val==None else string_to_float_tup(val)
def get_axes_xlabel(self, val=None) : return self.axes.get_xlabel() if val==None else val
def get_axes_ylabel(self, val=None) : return self.axes.get_ylabel() if val==None else val
def get_xhide_oom(self, val=None) :
return False \
if val==None else (val=='True')
def get_yhide_oom(self, val=None) :
return False \
if val==None else (val=='True')
def get_xtick_font(self, val=None) :
tick_props = self.axes.get_xaxis().get_major_ticks()[0].label1.get_fontproperties()
return mpl_to_font(tick_props) \
if val==None else val
def get_ytick_font(self, val=None) :
tick_props = self.axes.get_yaxis().get_major_ticks()[0].label1.get_fontproperties()
return mpl_to_font(tick_props) \
if val==None else val
def get_xlabel_font(self, val=None) :
label_props = self.axes.get_xaxis().get_label().get_fontproperties()
return mpl_to_font(label_props) \
if val==None else val
def get_ylabel_font(self, val=None) :
label_props = self.axes.get_yaxis().get_label().get_fontproperties()
return mpl_to_font(label_props) \
if val==None else val
def get_title_font(self, val=None) :
label_props = self.axes.title.get_fontproperties()
return mpl_to_font(label_props) \
if val==None else val
def change_legend_loc(self, val) : self.do_legend(loc = val)
def change_title(self, val) :
self.title = val
self.axes.set_title(self.title, visible = (self.title!=''))
self.queue_draw()
def change_xhide_oom(self, val) :
self.axes.get_xaxis().major.formatter.hide_oom = val
self.queue_draw()
def change_yhide_oom(self, val) :
self.axes.get_yaxis().major.formatter.hide_oom = val
self.queue_draw()
def change_ytick_font(self, val) :
ticks = self.axes.get_yaxis().get_major_ticks()
for tick in ticks :
font_to_mpl(tick.label1.get_fontproperties(), val)
def change_xtick_font(self, val) :
ticks = self.axes.get_xaxis().get_major_ticks()
for tick in ticks :
font_to_mpl(tick.label1.get_fontproperties(), val)
def change_xlabel_font(self, val) :
label_props = self.axes.get_xaxis().get_label().get_fontproperties()
font_to_mpl(label_props, val)
def change_ylabel_font(self, val) :
label_props = self.axes.get_yaxis().get_label().get_fontproperties()
font_to_mpl(label_props, val)
def change_title_font(self, val) :
label_props = self.axes.title.get_fontproperties()
font_to_mpl(label_props, val)
def change_read_series(self, val) : self.read_series = val
def change_xmultiplier(self, val=None) : self.axes.xaxis.get_major_formatter().multiplier = val; self.queue_draw()
def change_ymultiplier(self, val=None) : self.axes.yaxis.get_major_formatter().multiplier = val; self.queue_draw()
def change_legend(self, val) : self.legend = val; self.do_legend()
def change_axes_axis_bgcolor(self, val) : self.axes.set_axis_bgcolor(val); self.queue_draw()
def change_axes_xlabel(self, val) : self.axes.set_xlabel(val); self.queue_draw()
def change_axes_ylabel(self, val) : self.axes.set_ylabel(val); self.queue_draw()
def change_figure_facecolor(self, val) : self.fig.set_facecolor(val); self.queue_draw()
#END PROPERTIES FUNCTIONS
    def replot_all(self) :
        # a pie chart tracks a single source rather than a list of lines
        self.replot()
def get_method_window(self) :
#fram = gtk.Frame()
#fram.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(1, 1, 1))
win = gtk.VBox()
who_algn = gtk.Alignment(0.5, 0.5)
who_algn.set_property("top_padding", 10)
who_hbox = gtk.HBox(spacing=5)
who_hbox.pack_start(gtk.image_new_from_stock('aes-glancer-pie',
gtk.ICON_SIZE_BUTTON),
False)
who_hbox.pack_start(gtk.Label("Pie chart"), False)
who_algn.add(who_hbox)
win.pack_start(who_algn)
icon_table = gtk.Table(1, 5)
win.pack_start(icon_table)
# Visual Config
config_butt = gtk.Button()
config_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_PAGE_SETUP,
gtk.ICON_SIZE_BUTTON))
config_butt.set_tooltip_text("Appearance preferences...")
icon_table.attach(config_butt, 0, 1, 0, 1)
config_win = gtk.Window(); config_win.set_size_request(400, -1)
config_win.set_title("Configure plot appearance")
config_vbox = self.methods_make_visual_config()
config_win.add(config_vbox); config_win.set_transient_for(self.get_aenv().toplevel)
config_butt.connect("clicked", lambda o : config_win.show())
config_remove_butt = gtk.Button("Close")
config_remove_butt.connect("clicked", lambda o : config_win.hide())
config_remove_butt.show_all()
config_hbox = gtk.HBox(); config_hbox.show()
config_hbox.pack_start(config_remove_butt, False, False, 5)
config_vbox.pack_end(config_hbox, False, False, 5)
# Import Config
legend_amtb = self.aes_method_toggle_button("legend", None,
preferencable=False)
legend_amtb.set_image(gtk.image_new_from_stock(gtk.STOCK_JUSTIFY_RIGHT,
gtk.ICON_SIZE_BUTTON))
legend_amtb.set_tooltip_text("Toggle legend")
icon_table.attach(legend_amtb, 1, 2, 0, 1)
# From Sim
sim_hbox = gtk.HBox()
#sim_cmbo = gtk.ComboBox( get_object_dictionary().get_liststore_by_am('Source') )
#sim_cllr = gtk.CellRendererText(); sim_cmbo.pack_start(sim_cllr); sim_cllr.props.ellipsize = pango.ELLIPSIZE_END;
#sim_cmbo.add_attribute(sim_cllr, 'text', 1)
#self.sim_cmbo = sim_cmbo
#sim_hbox.pack_start(sim_cmbo)
clear_butt = gtk.Button()
clear_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_CLEAR,
gtk.ICON_SIZE_BUTTON))
clear_butt.set_tooltip_text("Clear all lines")
icon_table.attach(clear_butt, 0, 1, 1, 2)
clear_butt.connect("clicked", lambda o : self.check_clear(force=True))
replot_butt = gtk.Button()
replot_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_REFRESH,
gtk.ICON_SIZE_BUTTON))
replot_butt.set_tooltip_text("Replot all lines")
replot_butt.connect("clicked", lambda o : self.replot_all())
icon_table.attach(replot_butt, 1, 2, 1, 2)
#fram.add(win)
win.show_all()
return win
def methods_make_visual_config(self) :
config_vbox = gtk.VBox()
config_ntbk = gtk.Notebook()
general_table_maker = PreferencesTableMaker()
general_table_maker.append_heading("Title")
general_table_maker.append_row("Title", self.aes_method_entry("title"))
general_table_maker.append_row("Title Font", self.aes_method_font_button("title_font", "Set title font"))
general_table_maker.append_heading("Colours")
general_table_maker.append_row("Face Colour", self.aes_method_colour_button("figure_facecolor", "Set figure colour"))
general_table_maker.append_row("Axes Background",self.aes_method_colour_button("axes_axis_bgcolor", "Axes Background Colour"))
config_tabl = general_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False)
config_ntbk.append_page(config_tabl_vbox, gtk.Label("General"))
legend_table_maker = PreferencesTableMaker()
legend_table_maker.append_heading("Geometry")
legend_position_cmbo = gtk.combo_box_new_text()
for loc in legend_locs : legend_position_cmbo.append_text(loc)
self.aes_method_automate_combo_text(legend_position_cmbo, "legend_loc")
legend_table_maker.append_row("Position", legend_position_cmbo)
config_tabl = legend_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False)
config_ntbk.append_page(config_tabl_vbox, gtk.Label("Legend"))
axes = { 'x' : "X" , 'y' : "Y" }
for axis in axes :
axes_table_maker = PreferencesTableMaker()
axes_table_maker.append_heading("Labeling")
axes_table_maker.append_row(axes[axis]+" Axes Label", self.aes_method_entry("axes_"+axis+"label"))
axes_table_maker.append_row(axes[axis]+" Axis Font", self.aes_method_font_button(axis+"label_font", "Set "+axes[axis]+" axis font"))
axes_table_maker.append_row(axes[axis]+" Tick Font", self.aes_method_font_button(axis+"tick_font", "Set "+axes[axis]+" axis font"))
axes_table_maker.append_row(axes[axis]+" Multiplier", self.aes_method_entry(axis+"multiplier", wait_until_parsable_float = True))
config_tabl = axes_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False);
config_ntbk.append_page(config_tabl_vbox, gtk.Label(axes[axis]+" Axis"))
config_vbox.pack_start(config_ntbk)
config_vbox.show_all()
return config_vbox | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glancer/Pie.py | Pie.py |
import os, math, sys, getopt, string
from aobject.utils import debug_print
import pango
import random
from gtk import gdk
from ..tablemaker import PreferencesTableMaker
import threading
import cairo, gtk, gobject
import matplotlib
import numpy, numpy.fft
import scipy, scipy.interpolate, scipy.optimize
from matplotlib.backends.backend_cairo import RendererCairo
from matplotlib.backends.backend_gtkcairo import FigureCanvasGTKCairo as mpl_Canvas
from matplotlib.backends.backend_gtkcairo import NavigationToolbar2Cairo as mpl_Navbar
import pylab
from PIL import Image
from aobject.aobject import *
from Canvas import *
from Line import GlancerLine
from Scatter import GlancerScatter
from Legend import GlancerLegend
legend_locs = matplotlib.legend.Legend.codes
legend_locs_rev = dict((legend_locs[k],k) for k in legend_locs)
class GlancerPlotLike(AObject) :
fig = None
axes = None
canvas = None
lines = None
legend_object = None
plot_over = False
legend = True
title = ''
xlabel_font = ''
grid = get_preferencer().get_preference("GlancerPlot","grid")
if grid is None :
grid = False
grid_color = get_preferencer().get_preference("GlancerPlot","grid_color")
if grid_color is None :
grid_color = 'k'
read_series = True
scroll_speed = 0.1
_xlim_changed_conn = None
def change_time(self, time) :
self.time = time
for line in self.lines :
line.set_time(time)
def _do_xlim_changed(self, ax) :
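        '''When the x-limits change, reload any line whose Source needs the
        visible x-range (and whose cached range is now stale), then replot it.'''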
for line in self.lines :
if line.source.needs_x_range and \
line.source.current_x_range != self.axes.get_xlim() :
line.source.source_reload()
line.replot()
def set_xlim(self, left=None, right=None) :
if left is None :
left = self.get_axes_xmin()
if right is None :
right = self.get_axes_xmax()
self.axes.set_xlim(left, right)
self.queue_draw()
def set_ylim(self, top=None, bottom=None) :
if top is None :
top = self.get_axes_ymax()
if bottom is None :
bottom = self.get_axes_ymin()
self.axes.set_ylim(bottom, top)
self.queue_draw()
def get_useful_vars(self) :
return {
'axes' : 'mpl Axes',
'fig' : 'mpl Fig',
'canvas' : 'mpl Canvas',
}
def aes_add_a(self, aname_root, **parameters) :
if aname_root == 'GlancerLine' :
source = parameters['source']
return self.load_series(source)
if aname_root == 'GlancerLegend' :
self.axes.legend(loc=None)
if self.axes.legend_ is None :
return None
return GlancerLegend(self.axes.legend_,
self.queue_draw, env=self.get_aenv()
)
return AObject.aes_add_a(self, aname_root, **parameters)
def __init__(self, aname_root, fig, queue_draw, env=None):
self.canvas = GlancerCanvas(fig)
self.queue_draw = queue_draw
self.fig = fig
self._xlim_changed_conn = self.axes.callbacks.connect('xlim_changed',
self._do_xlim_changed)
self.canvas.mpl_connect('scroll_event', self.do_mpl_scroll_event)
self.canvas.mpl_connect('button_press_event', self.do_mpl_button_press_event)
self.canvas.mpl_connect('button_release_event',
self.do_mpl_button_release_event)
self.canvas.mpl_connect('motion_notify_event',
self.do_mpl_motion_notify_event)
self.lines = []
AObject.__init__(self, aname_root, env, view_object=False)
self.time_entr = self.aes_method_entry_update('time')
self.time_entr.show_all()
def do_legend(self, loc = None) :
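        '''Rebuild the matplotlib legend (optionally at location loc) and wrap
        it in a GlancerLegend so its font can be configured; if there are no
        lines, or the legend property is off, the legend is dropped.'''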
if len(self.lines) > 0 and self.legend :
self.axes.legend(loc=loc)
if self.legend_object is not None :
self.legend_object.aes_remove()
self.legend_object = None
self.legend_font_holder.remove(self.legend_font_holder.get_child())
if self.axes.legend_ is None :
return None
self.legend_object = GlancerLegend(self.axes.legend_,
self.queue_draw, env = self.get_aenv())
self.legend_font_holder.add(\
self.legend_object.aes_method_font_button("font",
"Set legend font"))
self.legend_font_holder.show_all()
self.absorb_properties(self.legend_object, as_self = False)
if loc==None : self.emit_property_change("legend_loc")
else : self.axes.legend_ = None
self.canvas.draw()
def check_legend(self) :
if len(self.lines) == 0 and self.legend_object :
self.legend_object.aes_remove()
self.legend_object = None
def check_clear(self, force = False) :
if not self.plot_over or force :
self.axes.clear()
for line in self.lines :
line.self_remove(parent_remove = False)
self.lines = []
self.check_legend()
self.queue_draw()
def update_canvas_size(self) :
w, h = self.fig.get_size_inches()
dpi = self.fig.get_dpi()
h *= dpi
w *= dpi
self.fig.canvas.resize(int(w), int(h))
self.fig.canvas.set_size_request(int(w), int(h))
self.queue_draw()
def replot_all(self) :
for line in self.lines :
line.replot()
class GlancerPlot(GlancerPlotLike) :
max_dim = 2
def __init__(self, fig, queue_draw, env=None):
self.axes = fig.add_subplot(1,1,1)
GlancerPlotLike.__init__(self, "GlancerPlot", fig, queue_draw, env=env)
def load_series(self, source, series = None, vals = None):
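        '''Add data to the plot: raw series/vals when source is None, a
        GlancerLine for a 'line' Source, and a GlancerScatter otherwise.'''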
resolution = self.get_resolution()
if source is None :
mpl_line, = self.axes.plot(series, vals)
line = GlancerLine(self, line=mpl_line, resolution=resolution,
env=self.get_aenv(), time=self.get_time())
elif source.source_type() == 'line' :
line = GlancerLine(self, source=source, axes=self.axes,
read_series=self.read_series,
resolution=resolution,
env=self.get_aenv(), time=self.get_time())
else :
line = GlancerScatter(self, source=source, axes=self.axes,
read_series=self.read_series,
env=self.get_aenv())
self.lines.append(line)
self.absorb_properties(line, as_self = False)
self.do_legend()
self.update_canvas_size()
        if source is not None :
            self.time_entr.get_parent().set_visible(source.get_time_cols() is not None)
return line
#PROPERTIES
def get_auto_aesthete_properties(self) :
return {
'grid' : ( bool, ),
'legend' : (bool, ),
'grid_color' : (string_to_float_tup, ),
'plot_over' : (bool, ),
'title' : (str, ),
'xmultiplier' : (float,),
'ymultiplier' : (float,),
'read_series' : (bool,),
'resolution' : (int,),
'legend_loc' : (str,),
'axes_axis_bgcolor' : (string_to_float_tup,),
'figure_facecolor' : (string_to_float_tup,),
'axes_xmin' : (float,),
'axes_xlabel' : (str,),
'axes_xmax' : (float,),
'axes_ymin' : (float,),
'axes_ylabel' : (str,),
'axes_ymax' : (float,),
'xhide_oom' : (bool,),
'yhide_oom' : (bool,),
'xtick_font' : (str,),
'ytick_font' : (str,),
'xlabel_font' : (str,),
'ylabel_font' : (str,),
'title_font' : (str,),
'size_inches' : (string_to_float_tup,),
'subplots_region' : (string_to_float_tup,),
'time' : (string_to_float_tup, (AOBJECT_CAN_NONE,)),
'time_args' : (int, (AOBJECT_CAN_NONE,)),
}
#BEGIN PROPERTIES FUNCTIONS
def get_subplots_region(self) :
spp = self.fig.subplotpars
return (spp.left, spp.bottom, spp.right, spp.top)
def change_subplots_region(self, val) :
self.fig.subplots_adjust(*val)
self.queue_draw()
def get_size_inches(self) :
return tuple(self.fig.get_size_inches())
def change_size_inches(self, val) :
self.fig.set_size_inches(*val)
self.update_canvas_size()
def change_legend(self, val) :
self.legend = val
self.do_legend()
def change_grid(self, val) :
self.grid = val
if self.grid :
self.axes.grid(color=self.grid_color)
else :
            self.axes.grid(False)
self.queue_draw()
def get_grid_color(self): return mpl_to_tuple(self.grid_color)
def change_grid_color(self, val) :
self.grid_color = val
if self.grid :
self.axes.grid(b=self.grid, color=val)
self.queue_draw()
def get_xmultiplier(self) : return 1.
def get_ymultiplier(self) : return 1.
def get_resolution(self) :
if self.lines == [] :
res_check = get_preferencer().get_preference("GlancerPlot","resolution")
if res_check is not None :
return int(res_check)
else :
return 10
return self.lines[0].get_resolution()
def get_legend_loc(self) :
return (legend_locs_rev[self.legend_object.get_loc()] if self.legend_object else '')
def get_axes_axis_bgcolor(self):
return mpl_to_tuple(self.axes.get_axis_bgcolor())
def get_figure_facecolor(self):
return mpl_to_tuple(self.fig.get_facecolor())
def get_axes_ylabel(self) : return self.axes.get_ylabel()
def get_axes_ymin(self) : return self.axes.get_ylim()[0]
def get_axes_ymax(self) : return self.axes.get_ylim()[1]
def get_axes_xlabel(self) : return self.axes.get_xlabel()
def get_axes_xmin(self) : return self.axes.get_xlim()[0]
def get_axes_xmax(self) : return self.axes.get_xlim()[1]
def get_xhide_oom(self) :
return False
def get_yhide_oom(self) :
return False
def get_xtick_font(self) :
tick_props = self.axes.get_xaxis().get_major_ticks()[0].label1.get_fontproperties()
return mpl_to_font(tick_props)
def get_ytick_font(self) :
tick_props = self.axes.get_yaxis().get_major_ticks()[0].label1.get_fontproperties()
return mpl_to_font(tick_props)
def get_xlabel_font(self) :
label_props = self.axes.get_xaxis().get_label().get_fontproperties()
return mpl_to_font(label_props)
def get_ylabel_font(self) :
label_props = self.axes.get_yaxis().get_label().get_fontproperties()
return mpl_to_font(label_props)
def get_title_font(self) :
label_props = self.axes.title.get_fontproperties()
return mpl_to_font(label_props)
#These are exactly as Aesthete will define them...
#def get_plot_over(self): return self.plot_over if val==None else (val=='True')
#def change_plot_over(self, val): self.plot_over = val
#def change_read_series(self, val) : self.read_series = val
def change_legend_loc(self, val) : self.do_legend(loc = val)
def change_title(self, val) :
self.title = val
self.axes.set_title(self.title, visible = (self.title!=''))
self.queue_draw()
def change_resolution(self, val) :
for line in self.lines :
line.set_resolution(val)
self.queue_draw()
def change_xhide_oom(self, val) :
self.axes.get_xaxis().major.formatter.hide_oom = val
self.queue_draw()
def change_yhide_oom(self, val) :
self.axes.get_yaxis().major.formatter.hide_oom = val
self.queue_draw()
def change_ytick_font(self, val) :
ticks = self.axes.get_yaxis().get_major_ticks()
for tick in ticks :
font_to_mpl(tick.label1.get_fontproperties(), val)
self.queue_draw()
def change_xtick_font(self, val) :
ticks = self.axes.get_xaxis().get_major_ticks()
for tick in ticks :
font_to_mpl(tick.label1.get_fontproperties(), val)
self.queue_draw()
def change_xlabel_font(self, val) :
label_props = self.axes.get_xaxis().get_label().get_fontproperties()
font_to_mpl(label_props, val)
self.queue_draw()
def change_ylabel_font(self, val) :
label_props = self.axes.get_yaxis().get_label().get_fontproperties()
font_to_mpl(label_props, val)
self.queue_draw()
def change_title_font(self, val) :
label_props = self.axes.title.get_fontproperties()
font_to_mpl(label_props, val)
self.queue_draw()
    def change_xmultiplier(self, val=None) : self.axes.xaxis.get_major_formatter().multiplier = val; self.queue_draw()
    def change_ymultiplier(self, val=None) : self.axes.yaxis.get_major_formatter().multiplier = val; self.queue_draw()
def change_axes_axis_bgcolor(self, val) : self.axes.set_axis_bgcolor(val); self.queue_draw()
def change_axes_xlabel(self, val) : self.axes.set_xlabel(val); self.queue_draw()
def change_axes_xmin(self, val) : self.set_xlim(left=val); self.queue_draw()
def change_axes_xmax(self, val) : self.set_xlim(right=val); self.queue_draw()
def change_axes_ylabel(self, val) : self.axes.set_ylabel(val); self.queue_draw()
def change_axes_ymin(self, val) : self.set_ylim(bottom=val); self.queue_draw()
def change_axes_ymax(self, val) : self.set_ylim(top=val); self.queue_draw()
def change_figure_facecolor(self, val) : self.fig.set_facecolor(val); self.queue_draw()
#END PROPERTIES FUNCTIONS
def get_method_window(self) :
#fram = gtk.Frame()
#fram.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(1, 1, 1))
win = gtk.VBox()
who_algn = gtk.Alignment(0.5, 0.5)
who_algn.set_property("top_padding", 10)
who_hbox = gtk.HBox(spacing=5)
who_hbox.pack_start(gtk.image_new_from_stock('aes-glancer-plot2d',
gtk.ICON_SIZE_BUTTON),
False)
who_hbox.pack_start(gtk.Label("2D Plot"), False)
who_algn.add(who_hbox)
win.pack_start(who_algn)
icon_table = gtk.Table(1, 5)
win.pack_start(icon_table)
# Visual Config
config_butt = gtk.Button()
config_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_PAGE_SETUP,
gtk.ICON_SIZE_BUTTON))
config_butt.set_tooltip_text("Appearance preferences...")
icon_table.attach(config_butt, 0, 1, 0, 1)
config_vbox = self.methods_make_visual_config()
config_vbox.aes_title = "Configure plot appearance"
config_butt.connect("clicked", lambda o :
self.env.action_panel.to_action_panel(config_vbox))
# Import Config
legend_amtb = self.aes_method_toggle_button("legend", None,
preferencable=False)
legend_amtb.set_image(gtk.image_new_from_stock(gtk.STOCK_JUSTIFY_RIGHT,
gtk.ICON_SIZE_BUTTON))
legend_amtb.set_tooltip_text("Toggle legend")
icon_table.attach(legend_amtb, 1, 2, 0, 1)
plot_over_amtb = self.aes_method_toggle_button("plot_over", None,
preferencable=False)
plot_over_amtb.set_image(gtk.image_new_from_stock(gtk.STOCK_DND_MULTIPLE,
gtk.ICON_SIZE_BUTTON))
plot_over_amtb.set_tooltip_text("Toggle overlay of new plots")
icon_table.attach(plot_over_amtb, 2, 3, 0, 1)
# From Sim
sim_hbox = gtk.HBox()
#sim_cmbo = gtk.ComboBox( get_object_dictionary().get_liststore_by_am('Source') )
#sim_cllr = gtk.CellRendererText(); sim_cmbo.pack_start(sim_cllr); sim_cllr.props.ellipsize = pango.ELLIPSIZE_END;
#sim_cmbo.add_attribute(sim_cllr, 'text', 1)
#self.sim_cmbo = sim_cmbo
#sim_hbox.pack_start(sim_cmbo)
clear_butt = gtk.Button()
clear_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_CLEAR,
gtk.ICON_SIZE_BUTTON))
clear_butt.set_tooltip_text("Clear all lines")
icon_table.attach(clear_butt, 3, 4, 0, 1)
clear_butt.connect("clicked", lambda o : self.check_clear(force=True))
replot_butt = gtk.Button()
replot_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_REFRESH,
gtk.ICON_SIZE_BUTTON))
replot_butt.set_tooltip_text("Replot all lines")
replot_butt.connect("clicked", lambda o : self.replot_all())
icon_table.attach(replot_butt, 4, 5, 0, 1)
#fram.add(win)
win.show_all()
return win
def methods_make_visual_config(self) :
config_vbox = gtk.VBox()
config_ntbk = gtk.Notebook()
general_table_maker = PreferencesTableMaker()
general_table_maker.append_heading("Title")
general_table_maker.append_row("Text",
self.aes_method_entry_update("title"))
general_table_maker.append_row("Font", self.aes_method_font_button("title_font", "Set title font"))
general_table_maker.append_heading("Geometry")
general_table_maker.append_row("Size (in)",
self.aes_method_tuple_entry_update("size_inches"))
general_table_maker.append_row("Plot area",
self.aes_method_tuple_entry_update("subplots_region"))
general_table_maker.append_heading("Colours")
general_table_maker.append_row("Face", self.aes_method_colour_button("figure_facecolor", "Set figure colour"))
general_table_maker.append_row("Axes",self.aes_method_colour_button("axes_axis_bgcolor", "Axes Background Colour"))
general_table_maker.append_heading("Detail")
general_table_maker.append_row("Resolution",
self.aes_method_entry_update("resolution"),
tip="Only applies to continuous Sources")
general_table_maker.append_heading("Grid")
general_table_maker.append_row("Show Grid", self.aes_method_check_button("grid", None))
general_table_maker.append_row("Grid Colour",self.aes_method_colour_button("grid_color", "Grid Colour"))
config_tabl = general_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False)
config_ntbk.append_page(config_tabl_vbox, gtk.Label("General"))
legend_table_maker = PreferencesTableMaker()
legend_table_maker.append_heading("Geometry")
legend_position_cmbo = gtk.combo_box_new_text()
for loc in legend_locs : legend_position_cmbo.append_text(loc)
self.aes_method_automate_combo_text(legend_position_cmbo, "legend_loc")
legend_table_maker.append_row("Position", legend_position_cmbo)
legend_table_maker.append_heading("Appearance")
self.legend_font_holder = gtk.Frame()
legend_table_maker.append_row(" Font", self.legend_font_holder)
config_tabl = legend_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False)
config_ntbk.append_page(config_tabl_vbox, gtk.Label("Legend"))
axes = { 'x' : "X" , 'y' : "Y" }
for axis in axes :
axes_table_maker = PreferencesTableMaker()
axes_table_maker.append_heading("Labeling")
axes_table_maker.append_row(" Label",
self.aes_method_entry_update("axes_"+axis+"label"))
axes_table_maker.append_row(" Axis",
self.aes_method_font_button(axis+"label_font", "Set "+axes[axis]+" axis font"))
axes_table_maker.append_row(" Tick", self.aes_method_font_button(axis+"tick_font", "Set "+axes[axis]+" axis font"))
axes_table_maker.append_heading("Data Limits")
axes_table_maker.append_row(" Minimum Value",
self.aes_method_entry_update("axes_"+axis+"min"))
axes_table_maker.append_row(" Maximum Value",
self.aes_method_entry_update("axes_"+axis+"max"))
config_tabl = axes_table_maker.make_table()
config_tabl_vbox = gtk.VBox(); config_tabl_vbox.pack_start(config_tabl, False);
config_ntbk.append_page(config_tabl_vbox, gtk.Label(axes[axis]+" Axis"))
config_vbox.pack_start(config_ntbk)
config_vbox.show_all()
return config_vbox
def do_mpl_scroll_event(self, event) :
'''Handle scrolling ourselves.'''
if event.inaxes != self.axes :
return False
self.axes.set_autoscale_on(False)
xl = self.axes.get_xlim()
yl = self.axes.get_ylim()
ec = (event.xdata, event.ydata)
# event.step tells direction
spd = (1+self.scroll_speed) ** (-event.step)
# unfortunately, this seems to be the only sensible way to
# get to the modifiers. Phrased oddly, but says do_x if we're
# not told to only do y, and v.v.
do_specific = event.guiEvent.state & gtk.gdk.CONTROL_MASK
do_x = not (do_specific and (event.guiEvent.state & gtk.gdk.SHIFT_MASK))
do_y = not (do_specific and do_x)
if do_x :
self.axes.set_xlim(ec[0] - (ec[0]-xl[0])*spd,
ec[0] - (ec[0]-xl[1])*spd)
if do_y :
self.axes.set_ylim(ec[1] - (ec[1]-yl[0])*spd,
ec[1] - (ec[1]-yl[1])*spd)
self.queue_draw()
return True
_move_from = None
_move_from_xl = None
_move_from_yl = None
def do_mpl_button_press_event(self, event) :
'''Check button presses.'''
if event.inaxes != self.axes :
return False
m_control = event.guiEvent.state & gtk.gdk.CONTROL_MASK
if event.button == 2 or event.button == 1 :
if m_control :
self.axes.autoscale_view()
self.axes.set_autoscale_on(True)
self.queue_draw()
else :
self.axes.set_autoscale_on(False)
self._move_from = (event.x, event.y)
self._move_from_xl = self.axes.get_xlim()
self._move_from_yl = self.axes.get_ylim()
self.queue_draw()
return True
return False
def do_mpl_button_release_event(self, event) :
'''Check button releases.'''
if event.button == 2 or event.button == 1:
self._move_from = None
self._move_from_xl = None
self._move_from_yl = None
self.queue_draw()
return True
return False
def do_mpl_motion_notify_event(self, event) :
'''Check motion notifications.'''
if event.inaxes != self.axes :
return False
do_specific = event.guiEvent.state & gtk.gdk.CONTROL_MASK
do_x = not (do_specific and (event.guiEvent.state & gtk.gdk.SHIFT_MASK))
do_y = not (do_specific and do_x)
if self._move_from is not None :
dx = (event.x-self._move_from[0])
dy = (event.y-self._move_from[1])
l,b,r,t = self.axes.bbox.extents
el,er = self.axes.get_xlim()
eb,et = self.axes.get_ylim()
dx = dx*(er-el)/(r-l)
dy = dy*(et-eb)/(t-b)
if do_x :
self.axes.set_xlim(self._move_from_xl[0]-dx,
self._move_from_xl[1]-dx)
if do_y :
self.axes.set_ylim(self._move_from_yl[0]-dy,
self._move_from_yl[1]-dy)
self.queue_draw()
return True | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glancer/Plot.py | Plot.py |
from ..glypher.Widget import GlyphBasicGlypher
from aobject.utils import debug_print
from ..glypher.PhraseGroup import GlypherBracketedPhrase
import gtk
from aobject.aobject import *
from ..glypher.Word import make_word
from sympy.core.symbol import Symbol
from ..sources.Source import Source
class GlypherSourceReference(GlypherBracketedPhrase) :
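    '''A bracketed, non-editable [Name] reference to an aesthete Source;
    get_sympy() exposes it as Symbol('aesthete_source_<key>') so that Gluer
    expressions can later be evaluated against the Source's data.'''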
source = None
def __init__(self, parent, source, name) :
GlypherBracketedPhrase.__init__(self, parent, bracket_shapes=('[',']'), auto=False)
self.mes.append('reference')
self.mes.append('source_reference')
self.source = source
obj = get_object_from_dictionary(source)
source_name = make_word(name, self)
source_name.set_bold(True)
source_name.set_auto_italicize(False)
source_name.set_italic(False)
self.get_target('expression').adopt(source_name)
self.set_recommending(self["expression"])
self['expression'].set_font_size_scaling(0.6)
source_name.set_enterable(False)
self.get_target('expression').IN().set_enterable(False)
self.set_rgb_colour([0.5, 0.4, 0])
def get_sympy(self) :
return Symbol('aesthete_source_'+self.source)
class Gluer(gtk.Frame, AObject) :
line_height = 35.
default_height = 560
default_width = 360
def __init__(self, env = None):
gtk.Frame.__init__(self)
AObject.__init__(self, "Gluer", env, view_object=True)
self.connect("aesthete-property-change", lambda o, p, v, a : self.queue_draw())
self.vbox = gtk.VBox()
self.sources = []
self.gmg = GlyphBasicGlypher(env=env, evaluable=False)
self.gmg.main_phrase.move()
self.gmg.main_phrase.set_enterable(True)
self.gmg.main_phrase.set_attachable(True)
self.gmg.main_phrase.line_length = self.default_width
self.absorb_properties(self.gmg)
self.vbox.pack_start(self.gmg)
self.source_action = self.insert_source_ref
self.add(self.vbox)
self.show_all()
self.log(1, "New Gluer initalized")
sources = None
def insert_source_ref(self, source) :
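        '''Record the Source and insert a GlypherSourceReference to it at the
        caret, pre-selecting it as the base Source if none is chosen yet.'''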
self.sources.append(source)
obj = get_object_from_dictionary(source)
ref = GlypherSourceReference(None, str(len(self.sources)-1),
obj.get_aname_nice())
self.gmg.caret.insert_entity(ref)
self.gmg.grab_focus()
if self.base_cmbo.get_active_iter() is None :
mdl = self.base_cmbo.get_model()
it = mdl.get_iter_first()
while it is not None and \
mdl.get_value(it,0) != source :
it = mdl.iter_next(it)
if it is not None :
self.base_cmbo.set_active_iter(it)
def get_method_window(self) :
win = gtk.VBox()
make_hbox = gtk.HBox()
make_hbox.set_spacing(5)
make_entr = gtk.Entry()
make_entr.set_tooltip_text("Name for new Source")
make_entr.set_width_chars(10)
make_hbox.pack_start(make_entr)
make_butt = gtk.Button()
make_butt.set_tooltip_text("Generate Source")
make_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_GOTO_LAST,
gtk.ICON_SIZE_BUTTON))
make_butt.connect("clicked", lambda b : self.make_new_source(make_entr.get_text()))
make_hbox.pack_start(make_butt, False)
make_hbox.pack_start(gtk.Label('Base'), False)
base_cmbo = gtk.ComboBox( get_object_dictionary().get_liststore_by_am('Source') )
base_cmbo.set_size_request(100, -1)
make_hbox.pack_start(base_cmbo)
base_cllr = gtk.CellRendererText(); base_cmbo.pack_start(base_cllr); base_cllr.props.ellipsize = pango.ELLIPSIZE_END;
base_cmbo.add_attribute(base_cllr, 'text', 1)
self.base_cmbo = base_cmbo
win.pack_start(make_hbox, False)
icon_table = gtk.Table(1, 1)
sim_butt = gtk.Button()
sim_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_INDEX,
gtk.ICON_SIZE_BUTTON))
sim_butt.set_tooltip_text("Reference currently selected Source")
sim_butt.set_sensitive(False)
icon_table.attach(sim_butt, 0, 1, 0, 1)
sim_butt.connect("clicked", lambda o : self.insert_source_ref(\
get_object_dictionary().selected_source))
get_object_dictionary().connect(\
'aesthete-selected-source-change',
lambda tr : sim_butt.set_sensitive(True))
win.pack_start(icon_table, False)
win.show_all()
return win
def make_new_source(self, aname_nice) :
sy = self.gmg.get_sympy()
base_source = self.base_cmbo.get_active_iter()
if base_source is None :
return
base_source = \
self.base_cmbo.get_model().get_value(base_source, 0)
cs = ComposedSource('composed', sy, sources=self.sources,
base_source=base_source)
cs.set_aname_nice(aname_nice)
self.gmg.grab_focus()
tol = 1e-7
class ComposedSource(Source) :
resolution = None
sympy_object = None
def __init__(self, stem, sympy_object, sources, base_source, env = None, show = False, reloadable = False, resolution = 300) :
self.resolution = resolution
self.sympy_object = sympy_object
self.sources = sources
self.base_source = base_source
Source.__init__(self, stem, env, show, reloadable)
def source_type(self) :
return get_object_from_dictionary(self.base_source).source_type()
def source_get_values(self, time = None, multi_array = False, x_range =
(0,1), resolution = 300) :
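        '''Evaluate the composed sympy expression pointwise over the base
        Source's x values, substituting each referenced Source's value
        (keyed aesthete_source_<n>) at the matching x; component Sources
        that do not line up raise RuntimeError.'''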
debug_print(self.sympy_object.args)
debug_print(self.sympy_object)
sub_dict = {}
for arg in list(self.sympy_object.atoms(Symbol)) :
if str(arg).startswith('aesthete_source_') :
name = str(arg)[len('aesthete_source_'):]
sub_dict[name] =\
get_object_from_dictionary(self.sources[int(name)])
if len(sub_dict) == 0:
raise RuntimeError('Need some other Source!')
basis_source = get_object_from_dictionary(self.base_source)
xa = basis_source.source_get_values(time=time, multi_array=False)
value_dict = {}
for source in sub_dict :
value_dict[source] = sub_dict[source].source_get_values(time=time,
multi_array=False)
p = []
xarr = []
for source in value_dict :
if len(value_dict[source]) < len(xa) :
raise RuntimeError( \
"""Components don\'t match!
(length %d for %s < %d for %s)""" % \
(len(value_dict[source]), sub_dict[source].get_aname_nice(),
len(xa), basis_source.get_aname_nice()))
for i in range(0, len(xa)) :
x = xa[i][0]
sub_val_dict = {'x':x}
for source in value_dict :
pair = value_dict[source][i]
if abs(pair[0]-x) > tol :
raise RuntimeError(
"""Components don\'t match!
(%lf for %s vs %lf for %s at point %d)""" %\
(pair[0], sub_dict[source].get_aname_nice(),
x, basis_source.get_aname_nice()))
sub_val_dict['aesthete_source_'+source] = pair[1]
y = self.sympy_object.subs(sub_val_dict)
p.append(float(y))
xarr.append(x)
if multi_array :
return [{'values':p,'x':xarr,'name':self.get_aname_nice()}]
return p
def source_get_max_dim(self) :
return 2 | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/gluer/Gluer.py | Gluer.py |
import gtk
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
import rsvg
import StringIO
from GlosserWidget import *
class GlosserVBox(GlosserWidget) :
ty = "vbox"
container = True
contained = None
def get_contained(self) :
return self.contained
def remove_from_layouts(self) :
GlosserWidget.remove_from_layouts(self)
for widget in self.contained :
widget.remove_from_layouts()
def restore_to_layouts(self) :
GlosserWidget.restore_to_layouts(self)
for widget in self.contained :
widget.restore_to_layouts()
def __init__(self, slide, design_layout, presentation_layout,
name_root='GlosserWidget', env=None) :
if self.design_widget is None :
self.design_widget = gtk.DrawingArea()
if self.presentation_widget is None :
self.presentation_widget = gtk.DrawingArea()
self.contained = []
GlosserWidget.__init__(self,
slide,
design_layout,
presentation_layout,
name_root='GlosserWidget',
env=env)
def get_action_panel(self) :
return None
def do_presentation_draw(self, cr, scaling=None) :
pass
def append(self, glosser_widget) :
self.contained.append(glosser_widget)
glosser_widget.design_widget.connect_after("size-allocate", self.do_widget_pos_update)
basic_offset_x = 0
def do_widget_pos_update(self, widget, req) :
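        '''Stack the contained widgets vertically from this box's position
        (offset horizontally by basic_offset_x) and resize the box to fit.'''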
y = 0
layout = self.layouts[0]
w = 0
for widget in self.contained :
offset = 0
#if l[3] == 'left' :
# offset = 0
#elif l[3] == 'centre' :
# offset = layout.body_w-widget.w
# offset *= 0.5
#elif l[3] == 'right' :
# offset = layout.body_w-widget.w
new_x = int(self.x+offset+self.basic_offset_x)
new_y = self.y+y
w = max(w, widget.w)
if abs(new_x-widget.x) > 5 or abs(new_y-widget.y) > 5:
widget.move(new_x, new_y)
y += widget.h
h = y
self.resize(w, h)
def presentation_draw(self, cr, scaling=None, ignore_spronks=True,
final=None) :
ret = GlosserWidget.presentation_draw(self, cr, scaling=scaling,
ignore_spronks=ignore_spronks,
final=final)
if not ret :
return False
for widget in self.subwidgets :
if hasattr(widget, 'draw') :
cr.save()
widget.draw(cr, scaling)
cr.restore()
for widget in self.contained :
cr.save()
cr.translate(widget.x-self.x, widget.y-self.y)
widget.presentation_draw(cr, scaling=scaling,
ignore_spronks=ignore_spronks, final=final)
cr.restore()
return True
class GlosserBullet_(gtk.DrawingArea) :
scaling = 1
draw_fn = None
get_contained = None
__gsignals__ = { "expose-event" : "override"}
def __init__(self, container, presentation=False) :
gtk.DrawingArea.__init__(self)
self.get_contained = container.get_contained
self.container = container
self.presentation = presentation
def rescale(self, rat) :
self.scaling = rat
self.queue_draw()
def do_expose_event(self, event):
cr = self.window.cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.draw(cr)
def draw(self, cr, scaling=None) :
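        # Draw a grey disc near the left edge, vertically centred on each
        # contained widget; widgets hidden in presentation mode are skipped.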
if scaling is None :
scaling = self.scaling
cr.scale(scaling, scaling)
cr.set_source_rgba(.5, .5, .5, 1.)
for widget in self.get_contained() :
if self.presentation and not widget.presentation_widget.get_visible() :
continue
cr.save()
middle = int(widget.y-self.container.y+.5*widget.h)
cr.arc(5, middle, 5, 0, 2*3.14159)
cr.close_path()
cr.fill()
cr.restore()
class GlosserBullets(GlosserVBox) :
ty = "bullets"
def __init__(self, slide, design_layout, presentation_layout, env=None) :
self.design_widget = GlosserBullet_(self, presentation=False)
self.presentation_widget = GlosserBullet_(self, presentation=True)
self.basic_offset_x = 20
GlosserVBox.__init__(self, slide, design_layout, presentation_layout, env=env,
name_root='GlosserBullets')
def rescale_action(self, subwidget, rat) :
self.subwidgets[subwidget].rescale(rat)
def append(self, glosser_widget) :
GlosserVBox.append(self, glosser_widget)
glosser_widget.presentation_widget.connect("show",
lambda w :
self.presentation_widget.queue_draw())
glosser_widget.presentation_widget.connect("hide",
lambda w :
self.presentation_widget.queue_draw()) | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glosser/Containers.py | Containers.py |
import gtk
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
import rsvg
import StringIO
from GlosserWidget import *
class GlosserGlyphEntry(GlosserWidget) :
ty = "glyphentry"
initial_font_size = 45
def get_auto_aesthete_properties(self):
return {
'font_size' : (float,),
}
def get_font_size(self) :
return self.design_widget.get_font_size()
def change_font_size(self, val) :
self.design_widget.set_font_size(val)
def make_action_panel(self) :
glyph_table_maker = PreferencesTableMaker()
glyph_table_maker.append_row("Font size",
self.aes_method_entry_update('font_size'))
win = glyph_table_maker.make_table()
win.aes_title = "Glancer View"
win.show_all()
return win
def __init__(self, slide, design_layout, presentation_layout, env=None) :
self.design_widget = glypher.Widget.GlyphEntry(resize_to_main_phrase=True,
margins=[10,10,10,10])
self.presentation_widget =\
GlosserPresentationImage(self.presentation_draw)
GlosserWidget.__init__(self,
slide,
design_layout,
presentation_layout,
name_root="GlosserGlyphEntry",
env=env)
self.action_panel = self.make_action_panel()
self.design_widget.connect("focus-in-event", lambda w, e :
self.get_aenv().action_panel.to_action_panel(self.action_panel))
def rescale_action(self, subwidget, rat) :
if subwidget == GLOSSER_WIDGET_DESIGN :
self.design_widget.set_font_size(int(self.initial_font_size)*rat)
elif subwidget == GLOSSER_WIDGET_PRESENTATION :
self.presentation_widget.rescale(rat)
def do_presentation_draw(self, cr, scaling=None, final=False) :
cr.scale(1/self.current_scaling[1], 1/self.current_scaling[1])
self.design_widget.draw(cr,
self.w,
self.h) | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glosser/Glypher.py | Glypher.py |
import gtk
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
from aobject.aobject import *
import rsvg
import StringIO
from GlosserWidget import *
class GlosserGlancer(GlosserWidget) :
ty = "glancer"
glancer = None
#PROPERTIES
def get_auto_aesthete_properties(self):
return {
'glancer' : (str, (AOBJECT_CAN_NONE,)),
'dpi' : (int,),
}
#BEGIN PROPERTIES FUNCTIONS
def get_dpi(self) :
return self.design_widget.dpi
def change_dpi(self, val) :
self.design_widget.dpi = val
self.design_widget.check_canvas_size()
self.emit('redraw-request')
def change_glancer(self, val) :
if val == '' :
val = None
if val != self.glancer :
if self.glancer is not None :
glancer = aobject.get_object_from_dictionary(self.glancer)
glancer.plotter.canvas.disconnect(self.glancer_conn)
glancer.plotter.canvas.disconnect(self.glancer_changed_conn)
            if val is not None :
glancer = aobject.get_object_from_dictionary(val)
self.glancer_conn =\
glancer.plotter.canvas.connect("size-allocate", lambda w, e:
self.design_widget.check_canvas_size())
self.glancer_changed_conn =\
glancer.plotter.canvas.connect("event", lambda w, e:
self.design_widget.set_recache_queue())
self.glancer = val
self.design_widget.glancer = val
self.design_widget.check_canvas_size()
self.design_widget.queue_draw()
self.presentation_widget.queue_draw()
self.emit('redraw-request')
#END PROPERTIES FUNCTIONS
def set_glancer(self, glancer) :
self.change_property('glancer', glancer)
self.rescale(1, self.current_scaling[1])
def __init__(self, slide, design_layout, presentation_layout, env=None) :
self.design_widget = GlosserPresentationGlancer()
self.presentation_widget =\
GlosserPresentationImage(self.do_presentation_draw)
self.design_widget.connect("focus-in-event", lambda w, e :
self.get_aenv().action_panel.to_action_panel(self.action_panel))
GlosserWidget.__init__(self,
slide,
design_layout,
presentation_layout,
name_root='GlosserGlancer',
env=env)
self.action_panel = self.make_action_panel()
def make_action_panel(self) :
win = gtk.VBox()
line_table_maker = PreferencesTableMaker()
glancer_cmbo = gtk.ComboBox(\
aobject.get_object_dictionary().get_liststore_by_am('Glancer') )
glancer_cllr = gtk.CellRendererText(); glancer_cmbo.pack_start(glancer_cllr)
self._glancer_cmbo = glancer_cmbo
glancer_cmbo.add_attribute(glancer_cllr, 'text', 1)
glancer_cmbo.connect("changed", lambda o : \
self.set_glancer(glancer_cmbo.get_active_text()))
line_table_maker.append_row("Plot", glancer_cmbo)
line_table_maker.append_row("DPI", self.aes_method_entry_update('dpi'))
win.aes_title = "Glancer View"
win.pack_start(line_table_maker.make_table())
win.show_all()
return win
def rescale_action(self, subwidget, rat) :
if subwidget == GLOSSER_WIDGET_PRESENTATION :
self.presentation_widget.rescale(rat)
def do_presentation_draw(self, cr, scaling=None, final=False) :
cr.scale(1/self.current_scaling[1], 1/self.current_scaling[1])
self.design_widget.presentation_draw(cr, final=final)
def get_action_panel(self) :
return self.action_panel
class GlosserPresentationGlancer(gtk.DrawingArea) :
__gsignals__ = { "expose-event" : "override", "key-release-event" : "override" ,
"redraw-request" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, () )}
glancer = None
default_blank_size = 100
basic_canvas_scaling = 1
dpi = 30
canvas_width = None
canvas_height = None
def do_key_release_event(self, event):
keyname = gtk.gdk.keyval_name(event.keyval)
m_control = bool(event.state & gtk.gdk.CONTROL_MASK)
m_shift = bool(event.state & gtk.gdk.SHIFT_MASK)
m_alt = bool(event.state & gtk.gdk.MOD1_MASK)
m_super = bool(event.state & gtk.gdk.SUPER_MASK)
return False
def __init__(self, env=None) :
gtk.DrawingArea.__init__(self)
self.add_events(gtk.gdk.KEY_RELEASE_MASK)
self.set_property("can-focus", True)
def do_expose_event(self, event):
cr = self.window.cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.presentation_draw(cr)
cache = None
def check_canvas_size(self) :
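        '''Size this drawing area to the linked figure's size in inches at
        self.dpi (times basic_canvas_scaling) and rebuild the cache surface;
        fall back to a default blank square when no Glancer is attached.'''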
if self.glancer is not None :
glancer = aobject.get_object_from_dictionary(self.glancer)
if glancer is not None and glancer.plotter is not None :
canvas = glancer.plotter.canvas
w1, h1 = glancer.plotter.fig.get_size_inches()
w1 *= self.dpi*self.basic_canvas_scaling
h1 *= self.dpi*self.basic_canvas_scaling
if self.canvas_width != w1 or self.canvas_height != h1 :
self.canvas_width = w1
self.canvas_height = h1
self.set_size_request(int(self.canvas_width),
int(self.canvas_height))
self.cache = cairo.ImageSurface(cairo.FORMAT_ARGB32,
int(w1/self.basic_canvas_scaling),
int(h1/self.basic_canvas_scaling))
self.recache_canvas()
return
else :
self.set_size_request(self.default_blank_size, self.default_blank_size)
self.canvas_width = None
self.canvas_height = None
def set_recache_queue(self) :
self.recache_queue=True
recache_queue = False
def recache_canvas(self, cr=None) :
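        '''Render the linked matplotlib figure through a Cairo renderer into
        the internal cache surface, or into the supplied context cr when one
        is given (as happens for final presentation output).'''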
self.recache_queue = False
if self.glancer is not None :
glancer = aobject.get_object_from_dictionary(self.glancer)
if glancer is not None and glancer.plotter is not None :
canvas = glancer.plotter.canvas
w1, h1 = glancer.plotter.fig.get_size_inches()
w1 *= self.dpi
h1 *= self.dpi
#w1, h1 = glancer.plotter.canvas.get_width_height()
if cr is None :
if self.cache is None :
self.cache = cairo.ImageSurface(cairo.FORMAT_ARGB32, w1, h1)
cr = cairo.Context(self.cache)
cr.save()
rat = canvas.figure.dpi/float(self.dpi)
cr.scale(1/rat, 1/rat)
renderer = RendererCairo (canvas.figure.dpi)
renderer.set_width_height (*canvas.get_width_height())
renderer.gc.ctx = cr
canvas.figure.draw (renderer)
cr.restore()
return
self.cache = None
def presentation_draw(self, cr, scaling=None, final=False) :
if self.recache_queue == True :
self.recache_canvas()
self.emit("redraw-request")
if self.glancer is not None :
glancer = aobject.get_object_from_dictionary(self.glancer)
if glancer is not None and glancer.plotter is not None :
canvas = glancer.plotter.canvas
cr.scale(self.basic_canvas_scaling, self.basic_canvas_scaling)
if final :
self.recache_canvas(cr)
else :
if self.cache is None :
self.recache_canvas()
cr.set_source_surface(self.cache, 0, 0)
cr.paint()
else :
cr.save()
cr.rectangle(self.allocation.x,
self.allocation.y,
self.allocation.width,
self.allocation.height)
cr.set_source_rgba(0.4, 0.4, 0.4, 1.0)
cr.fill()
cr.restore() | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/glosser/Glancer.py | Glancer.py |
import gtk
import tarfile
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
import rsvg
import StringIO
from TextView import *
from Glancer import *
from Glypher import *
from Containers import *
GLOSSER_PRESENTATION = True
GLOSSER_DESIGN = False
layout_bgcolor = { GLOSSER_PRESENTATION : (0.,0.,0.),
GLOSSER_DESIGN : (1.,1.,.8) }
widget_types = {
'textview' : ('Text', gtk.STOCK_JUSTIFY_FILL, GlosserTextView,
lambda s : render_stock(s,gtk.STOCK_JUSTIFY_FILL)),
'glyphentry' : ('Equation', 'aes-glypher', GlosserGlyphEntry,
lambda s : render_stock(s,'aes-glypher')),
'glancer' : ('Plot', 'aes-glancer', GlosserGlancer,
lambda s : render_stock(s,'aes-glancer')),
'vbox' : ('VBox', gtk.STOCK_GO_DOWN, GlosserVBox,
lambda s : render_stock(s,gtk.STOCK_GO_DOWN)),
    'bullets' : ('Bullets', gtk.STOCK_SORT_ASCENDING, GlosserBullets,
lambda s : render_stock(s,gtk.STOCK_SORT_ASCENDING)) }
aname_root_to_ty = {}
for ty in widget_types.keys() :
aname_root_to_ty[widget_types[ty][2].__name__] = ty
def _make_lambda_iw(obj, w) :
return lambda b : obj.insert_widget(w)
def _px_to_float(px_str) :
w = px_str
if w.endswith('px') :
f = float(w[:-2])
else :
f = float(w)
return f
GLOSSER_SPRONK_CELL_NORMAL = 0
GLOSSER_SPRONK_CELL_OPEN = 1
GLOSSER_SPRONK_CELL_CLOSE = 2
GLOSSER_SPRONK_CELL_INITIAL = 3
GLOSSER_SPRONK_CELL_FINAL = 4
glosser_spronk_cell_colours = {\
GLOSSER_SPRONK_CELL_OPEN : ('#DDDDDD', '#FFDDAA'),
GLOSSER_SPRONK_CELL_INITIAL : ('#BBBBBB', '#FFAA00'),
GLOSSER_SPRONK_CELL_NORMAL : ('#BBBBBB', '#FFAA00'),
GLOSSER_SPRONK_CELL_FINAL : ('#BBBBBB', '#FFAA00'),
GLOSSER_SPRONK_CELL_CLOSE : ('#DDDDDD', '#FFDDAA')}
class GlosserSpronkCellRenderer(gtk.GenericCellRenderer) :
__gproperties__ = {
"text": (gobject.TYPE_STRING, "text", "text", "", gobject.PARAM_READWRITE),
"active": (gobject.TYPE_BOOLEAN, "active", "active", False, gobject.PARAM_READWRITE),
"special": (gobject.TYPE_INT, "special", "special",0,4,
GLOSSER_SPRONK_CELL_NORMAL, gobject.PARAM_READWRITE),
}
def __init__(self) :
self.__gobject_init__()
self.spronk_font_face = cairo.ToyFontFace("Linux Libertine 8")
def do_set_property(self, pspec, value) :
setattr(self, pspec.name, value)
def do_get_property(self, pspec) :
return getattr(self, pspec.name)
def on_render(self, window, widget, background_area, cell_area, expose_area,
flags) :
cr = window.cairo_create()
cr.save()
cr.translate(cell_area.x, cell_area.y)
cr.rectangle(0, 0, cell_area.width, cell_area.height)
colour = gtk.gdk.Color(\
glosser_spronk_cell_colours[self.special][1 if self.active else 0])
colour_list = [colour.red_float,
colour.green_float,
colour.blue_float,
1.]
colour_list[3] = .5
cr.set_source_rgba(*colour_list)
cr.fill()
colour_list[3] = 1.
if self.special == GLOSSER_SPRONK_CELL_OPEN :
cr.move_to(cell_area.width/2, cell_area.height)
cr.rel_line_to(-10, 0)
cr.rel_line_to(10, -cell_area.height)
cr.rel_line_to(10, cell_area.height)
cr.close_path()
colour_list[3] = 1.
cr.set_source_rgba(*colour_list)
cr.fill()
elif self.special == GLOSSER_SPRONK_CELL_CLOSE :
cr.move_to(cell_area.width/2, 0)
cr.rel_line_to(-10, 0)
cr.rel_line_to(10, cell_area.height)
cr.rel_line_to(10, -cell_area.height)
cr.close_path()
cr.set_source_rgba(*colour_list)
cr.fill()
elif self.special == GLOSSER_SPRONK_CELL_INITIAL :
cr.move_to(cell_area.width/2,cell_area.height)
cr.arc(cell_area.width/2, cell_area.height, 10, 3.14159, 0)
cr.close_path()
cr.set_source_rgba(*colour_list)
cr.fill()
elif self.special == GLOSSER_SPRONK_CELL_FINAL :
cr.move_to(cell_area.width/2,0)
cr.arc(cell_area.width/2, 0, 10, 0, 3.14159)
cr.close_path()
cr.set_source_rgba(*colour_list)
cr.fill()
elif self.special == GLOSSER_SPRONK_CELL_NORMAL :
cr.rectangle(cell_area.width/2-10, 0, 20, cell_area.height)
cr.set_source_rgba(*colour_list)
cr.fill()
cr.restore()
line_height = 100
def on_get_size(self, widget, cell_area=None):
x = 0
y = 0
if cell_area is not None :
x = cell_area.x
y = cell_area.y
w = 30
h = 10
return (x, y, w, h)
gobject.type_register(GlosserSpronkCellRenderer)
class GlosserSlideCellRenderer(gtk.GenericCellRenderer) :
__gproperties__ = {
"slide": (gobject.TYPE_PYOBJECT, "slide", "Slide", gobject.PARAM_READWRITE),
}
def __init__(self) :
self.__gobject_init__()
self.slide = None
self.spronk_font_face = cairo.ToyFontFace("Linux Libertine 5")
def do_set_property(self, pspec, value) :
setattr(self, pspec.name, value)
def do_get_property(self, pspec) :
return getattr(self, pspec.name)
def on_render(self, window, widget, background_area, cell_area, expose_area,
flags) :
cr = window.cairo_create()
cr.save()
cr.translate(cell_area.x, cell_area.y)
if self.slide is None :
return
ci = self.slide.thumb
if cr is not None and ci is not None :
spnum = len(self.slide.spronks)
if spnum > 0:
cr.save()
mid = ci.get_height()/2
cr.set_source_rgb(.5,.5,.5)
if spnum > 6 :
text = str(len(self.slide.spronks))
cr.set_font_face(self.spronk_font_face)
cr.set_font_size(8.)
for i in range(0, len(text)) :
cr.move_to(0, mid-4*len(text)+8*i)
cr.show_text(text[i])
else :
cr.save()
for i in range(0, spnum) :
cr.move_to(0, mid-3*spnum+6*i)
cr.rel_line_to(0, 6)
cr.rel_line_to(3, -3)
cr.close_path()
cr.fill()
cr.restore()
cr.restore()
cr.save()
cr.translate(5, 0)
cr.set_source_surface(ci, 0, 0)
cr.paint()
cr.restore()
cr.restore()
line_height = 100
def on_get_size(self, widget, cell_area=None):
x = 0
y = 0
if cell_area is not None :
x = cell_area.x
y = cell_area.y
if self.slide is None or self.slide.thumb is None :
w = self.line_height
h = self.line_height
else :
w = int(self.slide.thumb.get_width())
h = int(self.slide.thumb.get_height())
return (x, y, w, h)
#ims = self.main_phrase.cairo_cache_image_surface
gobject.type_register(GlosserSlideCellRenderer)
def _fill_text(root, nodeid, text) :
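    '''Set the text of the first child (a tspan line in the templates) of the
    SVG element carrying the given id; ids not found exactly once are
    silently ignored.'''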
node = root.xpath('//*[@id=\'%s\']'%nodeid)
if len(node) != 1 :
return
#raise RuntimeError('Malformed template SVG :'+\
# ' Expected 1 id to be '+nodeid)
node = node[0]
#for child in node :
# node.remove(child)
#text_node = _make_text_tag()
text_node = node[0]
text_node.text = text
node.append(text_node)
def _make_text_tag() :
el = etree.Element('tspan', role='line')
return el
class GlosserLayout(gtk.Layout) :
__gsignals__ = { \
"expose-event" : "override", \
"button-press-event" : "override",\
"button-release-event" : "override",\
"scroll-event" : "override",\
"size-allocate" : "override",\
}
handle = None
width = None
height = None
line_num = 0
line_indicators = ()
default_height = 100.
lines = None
def do_size_allocate(self, allocation) :
gtk.Layout.do_size_allocate(self, allocation)
self._resize_to_allocation(allocation)
def __init__(self, presentation=False):
gtk.Layout.__init__(self)
self.add_events(gtk.gdk.POINTER_MOTION_MASK)
self.add_events(gtk.gdk.BUTTON_PRESS_MASK)
self.add_events(gtk.gdk.BUTTON_RELEASE_MASK)
self.add_events(gtk.gdk.SCROLL_MASK)
self.pilcrow_font_face = cairo.ToyFontFace("Linux Libertine 20")
self.am_presentation = presentation
def do_expose_event(self, event):
cr = self.get_bin_window().cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.draw(cr, *self.get_bin_window().get_size())
def get_rat(self) :
al = self.get_allocation()
swidth = al.width
sheight = al.height
twidth = self.width
theight = self.height
rath = sheight/float(theight)
ratw = swidth/float(twidth)
rat = min(rath, ratw)
if self.show_decoration :
rat *= 0.8
return rat
def translate_dist(self, d, rev=False) :
rat = self.get_rat()
if rev :
d /= rat
else :
d *= rat
return d
def translate_dim(self, w, h) :
rat = self.get_rat()
w *= rat
h *= rat
return w, h
def translate_body_pos(self, x, y, rev=False) :
if not rev :
x += self.body_x
y += self.body_y
pos = self.translate_pos(x, y, rev=rev)
if rev :
pos = (pos[0]-self.body_x, pos[1]-self.body_y)
return pos
def translate_pos(self, x, y, rev=False) :
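        '''Map template coordinates to layout coordinates, scaling by
        get_rat() and centring the slide in the allocation; rev=True applies
        the inverse mapping.'''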
al = self.get_allocation()
swidth = al.width
sheight = al.height
twidth = self.width
theight = self.height
rat = self.get_rat()
if rev :
x -= .5*(swidth-rat*twidth)
y -= .5*(sheight-rat*theight)
x /= rat
y /= rat
else :
x *= rat
y *= rat
x += .5*(swidth-rat*twidth)
y += .5*(sheight-rat*theight)
return x, y
def draw(self, cr, swidth, sheight):
cr.save()
cr.set_source_rgb(*layout_bgcolor[self.am_presentation])
cr.rectangle(0, 0, swidth, sheight); cr.fill()
if self.handle is not None :
rat = self.get_rat()
twidth = self.width
theight = self.height
#rat = swidth/float(twidth)
#if self.show_decoration :
# rat *= 0.8
cr.translate(0.5*(swidth-rat*twidth),
0.5*(sheight-rat*theight))
cr.scale(rat, rat)
if self.show_decoration :
blurrad = 2.
for i in range(1,10) :
cr.rectangle(-blurrad*i,-blurrad*i,twidth+2*blurrad*i,theight+2*blurrad*i)
cr.set_source_rgba(0.,0.,0.,0.5/float(i))
cr.fill()
self.handle.render_cairo(cr)
if self.show_decoration :
y = 0
i = 0
current_y = None
current_height = None
for line in self.lines :
cr.set_source_rgba(0.85,0.85,0.85,1.)
cr.rectangle(self.body_x-13, self.body_y+y, 10., line[1].h)
cr.fill()
if i == self.line_num :
current_y = y
current_height = line[1].h
y += line[1].h
i += 1
if current_y is None :
current_y = y
if self.current_widget is not None :
cr.set_source_rgba(0.5,0.5,0.5,1.)
cr.rectangle(self.body_x-18,
self.body_y+self.current_widget.y, 5,
self.current_widget.h)
cr.fill()
rx, ry = (self.body_x-13, self.body_y+current_y)
if current_height is not None :
linear_gradient = cairo.LinearGradient(rx, ry, rx,
ry+current_height)
linear_gradient.add_color_stop_rgba(0, 0.85, 0.85, 0.85, 1.)
linear_gradient.add_color_stop_rgba(0.25, 0.7, 0.7, 0.7, .8)
linear_gradient.add_color_stop_rgba(0.75, 0.7, 0.7, 0.7, .8)
linear_gradient.add_color_stop_rgba(1, 0.85, 0.85, 0.85, 1.)
else :
current_height = self.default_height
linear_gradient = cairo.LinearGradient(rx, ry, rx,
ry+current_height)
linear_gradient.add_color_stop_rgba(0, 0.85, 0.85, 0.85, 1.)
linear_gradient.add_color_stop_rgba(0.5, 0.7, 0.7, 0.7, .2)
linear_gradient.add_color_stop_rgba(1, 0.85, 0.85, 0.85, 0.)
cr.set_source(linear_gradient)
cr.rectangle(rx, ry, 10., current_height)
cr.fill()
cr.move_to(self.body_x-30, self.body_y+current_y)
cr.set_source_rgba(0.8,0.8,0.8,1.)
cr.set_font_face(self.pilcrow_font_face)
cr.set_font_size(20.)
cr.show_text(u'\u00b6')
#cr.set_source_rgba(0.4,0.4,0.4,1.)
i = 0
for ind in self.line_indicators :
cr.save()
cr.translate(self.body_x-35,
self.body_y+current_y+\
i*20)
if isinstance(ind, gtk.gdk.Pixbuf) :
cr.set_source_pixbuf(ind, 0, 0)
cr.paint()
cr.stroke()
else :
cr.move_to(4, 20)
cr.show_text(ind)
cr.restore()
i += 1
#cr.set_source_rgba(0.8,0.8,0.8,1.)
#cr.set_line_width(10.)
#cr.move_to(self.body_x-8, self.body_y+current_y)
#cr.rel_line_to(0, current_height)
#cr.stroke()
cr.restore()
def do_button_press_event(self, event) :
return False
def do_button_release_event(self, event) :
return False
def do_scroll_event(self, event) :
return False
def _resize_to_allocation(self, allocation=None) :
pass
class GlosserSlide(aobject.AObject) :
title = None
num = None
thumb = None
lines = None
spronks = None
variant = 'basic'
def aes_get_parameters(self) :
return {'num' : self.get_num()}
def pre_spronk(self) :
for l in self.lines :
l[1].initial_spronk()
if l[1].container :
for w in l[1].contained :
w.initial_spronk()
def execute_all_spronks(self) :
self.pre_spronk()
for n in range(0, len(self.spronks)) :
self.execute_spronk(n)
def execute_spronk(self, num, reverse=False) :
n = 1 if reverse else 0
for action_pair in self.spronks[num] :
if action_pair[n] is not None :
action_pair[n]()
def add_to_spronk(self, num, action, reverse=None) :
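        '''Register an (action, reverse) pair under spronk number num, growing
        the spronk list as needed; each spronk is a numbered step whose
        actions can be executed forwards or, via the stored reverse
        callables, undone.'''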
for l in range(len(self.spronks), num+1) :
self.spronks.append([])
self.spronks[num].append((action, reverse))
def remove_from_spronks(self, action) :
for spronk in self.spronks :
for action_pair in spronk :
if action == action_pair[0] :
spronk.remove(action_pair)
#PROPERTIES
def get_auto_aesthete_properties(self):
return {
'num' : (int,),
'title' : (str,),
'variant' : (str,),
}
#BEGIN PROPERTIES FUNCTIONS
def get_title(self) :
return self.title if self.title is not None else ''
def change_title(self, val) :
self.title = val if val != "" else None
self.reload()
def change_variant(self, val) :
self.variant = val
self.reload()
#END PROPERTIES FUNCTIONS
def reload(self) :
self.reload_slide(self.num, change_current=False)
def __init__(self, num, variant, reload_slide, variant_lsto, env=None) :
self.reload_slide = reload_slide
self.num = num
self.lines = []
self.spronks = []
self.variant_lsto = variant_lsto
aobject.AObject.__init__(self, name_root='GlosserSlide',
env=env,
view_object=False,
elevate=False)
self.set_variant(variant)
self.action_panel = self.make_action_panel()
def render_widgets_to_cairo(self, cr, ignore_spronks=True) :
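        '''Draw every line's widget onto cr at its slide position,
        translating to (x, y) before delegating to presentation_draw.'''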
cr.save()
for l in self.lines :
cr.save()
cr.translate(l[1].x, l[1].y)
l[1].presentation_draw(cr, 1., ignore_spronks=ignore_spronks)
cr.restore()
cr.restore()
def remove_line(self, n) :
self.lines[n][1].remove_from_layouts()
self.lines.remove(self.lines[n])
def insert_line(self, n, ty, widget, line_height, alignment) :
self.lines.insert(n, [ty, widget, line_height, alignment])
def set_title(self, title='') :
self.change_property('title', title)
def get_num(self) :
return self.num
def set_thumb(self, thumb) :
self.thumb = thumb
def get_thumb(self) :
return self.thumb
def fill_in_xml(self, xml) :
root = xml.getroot()
title_text = self.title
if title_text is None : title_text = '[NO TITLE]'
_fill_text(root, 'glosser_title', title_text)
num_text = str(self.get_num()+1)
_fill_text(root, 'glosser_number', num_text)
def make_action_panel(self) :
win = gtk.VBox()
line_table_maker = PreferencesTableMaker()
line_table_maker.append_row("Title",
self.aes_method_entry_update("title"))
variant_cmbo = gtk.ComboBox(self.variant_lsto)
variant_crtx = gtk.CellRendererText()
variant_cmbo.pack_start(variant_crtx, True)
variant_cmbo.add_attribute(variant_crtx, 'text', 1)
line_table_maker.append_row('Variant',
variant_cmbo)
self.aes_method_automate_combo(variant_cmbo, 'variant', 0)
win.aes_title = "Slide %s" % self.get_num()
win.pack_start(line_table_maker.make_table())
win.show_all()
return win
class GlosserBasic(gtk.VBox) :
complete_widgets_list = None
presentation = None
def go_previous(self) :
self.set_slide(self.slides.index(self.current_slide)-1)
def go_next(self) :
self.set_slide(self.slides.index(self.current_slide)+1)
def go_first(self) :
self.set_slide(0)
def go_last(self) :
self.set_slide(len(self.slides)-1)
def do_key_release_event(self, event):
keyname = gtk.gdk.keyval_name(event.keyval)
m_control = bool(event.state & gtk.gdk.CONTROL_MASK)
m_shift = bool(event.state & gtk.gdk.SHIFT_MASK)
m_alt = bool(event.state & gtk.gdk.MOD1_MASK)
m_super = bool(event.state & gtk.gdk.SUPER_MASK)
if m_control and not m_alt and not m_shift :
if keyname == 'Home' :
self.go_first()
return True
if keyname == 'End' :
self.go_last()
return True
            if keyname == 'Page_Up' :
                self.go_previous()
                return True
            if keyname == 'Page_Down' :
                self.go_next()
                return True
if keyname == 'Escape' and self.presentation is not None :
self.get_parent().unfullscreen()
self.get_parent().hide()
self.presentation.grab_focus()
return True
return False
__gsignals__ = { \
"key-release-event" : "override",\
}
slide_list = None
default_template = 'basic'
current_slide = None
current_xml = None
current_variant = ""
template = None
conference = None
date = None
institute = None
presenter = None
def set_current_line_alignment(self, al) :
if self.current_line in range(0, len(self.current_slide.lines)) :
self.current_slide.lines[self.current_line][3] = al
self.do_widget_pos_update(self, None)
    def __init__(self, layout=None, template=None, slides=None, complete_widgets_list=None):
gtk.VBox.__init__(self)
if layout is None :
layout = GlosserLayout()
self.layout = layout
self.connect_after("size-allocate", self.do_widget_pos_update)
self.set_property("can-focus", True)
self.add_events(gtk.gdk.KEY_RELEASE_MASK)
if template is None :
template = { 'name' : self.default_template }
self.template = template
if complete_widgets_list is None :
complete_widgets_list = []
self.complete_widgets_list = complete_widgets_list
if slides is None :
self.slides = []
else :
self.slides = slides
self.layout.show_decoration = False
self.pack_start(self.layout)
self.grab_focus()
self.set_slide(0)
self.show_all()
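    # Stack the current slide's line widgets vertically inside the template's
    # body area, applying each line's horizontal alignment ('left', 'centre'
    # or 'right') and only moving a widget when it has drifted noticeably.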
def do_widget_pos_update(self, widget, req) :
if self.current_slide is None :
return
y = 0
for l in self.current_slide.lines :
widget = l[1]
#new_pos = map(int,self.layout.translate_pos(
# self.template['body']['x'],
# self.template['body']['y']+y))
            if l[3] == 'left' :
                offset = 0
            elif l[3] == 'centre' :
                offset = 0.5*(self.template["body"]["width"]-widget.w)
            elif l[3] == 'right' :
                offset = self.template["body"]["width"]-widget.w
            else :
                offset = 0
#new_pos[0] += int(offset)
#new_x, new_y = map(int,self.layout.translate_pos(new_pos[0],
# new_pos[1],
# rev=True))
#new_x, new_y = self.template["body"]["x"], self.template["body"]["y"]+y
new_x = int(offset)
new_y = y
#offset = self.layout.translate_pos(body_x, body_y+y)
if abs(new_x-widget.x) > 5 or abs(new_y-widget.y) > 5:
widget.move(new_x, new_y)
#if widget.current_offset[subwidget] != \
# offset or\
# widget.current_scaling[subwidget] != \
# self.layout.translate_dist(1) or \
# (widget.x, widget.y) != (new_x,new_y) :
# widget.offset(subwidget, *offset)
# widget.rescale(subwidget, self.layout.translate_dist(1))
# widget.move(new_x, new_y)
y += widget.h
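    # Load the SVG for the requested template variant, record the element with
    # id 'glosser_body' as the slide's content area, then remove that element
    # so it is not rendered.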
def set_template_variant_xml(self, variant, force=False) :
if variant == self.current_variant and not force :
return
self.current_variant = variant
self.current_xml = copy.deepcopy(self.template['variants'][variant]['xml'])
root = self.current_xml.getroot()
body_node = root.xpath('//*[@id=\'glosser_body\']')
if len(body_node) != 1 :
            raise RuntimeError('Malformed template SVG: expected exactly one '
                               'element with id "glosser_body"')
body_node = body_node[0]
body = { 'x' : _px_to_float(body_node.get('x')),
'y' : _px_to_float(body_node.get('y')),
'width' : _px_to_float(body_node.get('width')),
'height' : _px_to_float(body_node.get('height')) }
self.template['body'] = body
body_node.getparent().remove(body_node)
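    # Show (or refresh) slide n: substitute the glosser_* text placeholders in
    # the template XML, swap the visible line widgets over to the new slide
    # and trigger a redraw.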
def set_slide(self, n=None, change_current=True, xml=None) :
have_changed = False
old_slide = self.current_slide
if n is not None :
if n < 0 or n >= len(self.slides) :
return
slide = self.slides[n]
if change_current and slide != self.current_slide :
have_changed = True
self.current_slide = slide
else :
slide = self.current_slide
# Even if we do not want to change TO this slide, we may still need to
# update it
change_current = change_current or slide==self.current_slide
if slide is None :
return
n = slide.num
if self.current_xml is None :
return
self.set_template_variant_xml(slide.variant)
if xml is None :
if change_current :
xml = self.current_xml
else :
xml = copy.deepcopy(self.current_xml)
slide.fill_in_xml(xml)
root = xml.getroot()
conf_text = self.conference
if conf_text is None : conf_text = ''
_fill_text(root, 'glosser_conference', conf_text)
_fill_text(root, 'glosser_total', str(len(self.slides)))
attributes = ['presenter', 'institute', 'date', 'conference']
for attribute in attributes :
val = self.__getattribute__(attribute)
if val is None :
val = ''
_fill_text(root, 'glosser_'+attribute, str(val))
        _fill_text(root, 'glosser_date_big',
                   self.date if self.date is not None else '')
        _fill_text(root, 'glosser_presenter_institute', "%s (%s)" % (
            self.presenter if self.presenter is not None else '',
            self.institute if self.institute is not None else ''))
if have_changed and self.presentation is None and self.get_aenv() is not None and \
self.get_aenv().action_panel is not None :
self.get_aenv().action_panel.to_action_panel(slide.action_panel)
if change_current :
h = self.template['body']['height']
if old_slide != self.current_slide :
if old_slide is not None :
for line in old_slide.lines :
line[1].remove_from_layouts()
for line in slide.lines :
widget = line[1]
line[1].restore_to_layouts()
slide.pre_spronk()
self.do_widget_pos_update(self, None)
self.redraw(slide, xml, change_current=change_current)
def redraw(self, slide=None, xml=None, change_current=True) :
if slide is None :
slide = self.current_slide
if xml is None :
self.set_template_variant_xml(slide.variant)
xml = self.current_xml
if xml is not None and \
slide is not None :
svg = StringIO.StringIO()
xml.write(svg)
handle = rsvg.Handle()
handle.write(svg.getvalue())
handle.close()
if change_current :
self.layout.width = self.template['width']
self.layout.height = self.template['height']
self.layout.handle = handle
#self.layout.line_num = self.current_line
self.layout.lines = slide.lines
#self.layout.current_widget = self.current_widget
#if self.current_line < len(slide.lines) :
# ty = slide.lines[self.current_line][0]
# self.layout.line_indicators = \
# [ widget_types[ty][3](self.get_style())]
# if self.current_widget is not None and self.current_widget != slide.lines[self.current_line][1]:
# ty = self.current_widget.ty
# self.layout.line_indicators.append(widget_types[ty][3](self.get_style()))
#else :
# self.layout.line_indicators = ( u'\u273c', )
self.layout.body_x = self.template['body']['x']
self.layout.body_y = self.template['body']['y']
self.layout.body_w = self.template['body']['width']
self.layout.body_h = self.template['body']['height']
self.layout.queue_draw()
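# GlosserPresentation is the fullscreen delivery view. A "spronk" is one
# incremental build step on a slide (a list of forward/reverse widget actions,
# typically show/hide pairs); go_next() and go_previous() step through the
# current slide's spronks before moving between slides.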
class GlosserPresentation(GlosserBasic) :
presentation = None
current_spronk = 0
def go_first(self) :
self.set_slide(0)
self.current_spronk = 0
def go_last(self) :
self.set_slide(len(self.slides)-1)
self.current_slide.execute_all_spronks()
self.current_spronk = len(self.current_slide.spronks)
def go_previous(self) :
if self.current_spronk == 0 :
self.set_slide(self.slides.index(self.current_slide)-1)
else :
self.current_spronk -= 1
self.current_slide.execute_spronk(self.current_spronk, reverse=True)
def go_next(self) :
if self.current_spronk == len(self.current_slide.spronks) :
if self.current_slide == self.slides[-1] :
return
else :
self.set_slide(self.slides.index(self.current_slide)+1)
self.current_spronk = 0
else :
self.current_slide.execute_spronk(self.current_spronk, reverse=False)
self.current_spronk += 1
def do_key_release_event(self, event):
keyname = gtk.gdk.keyval_name(event.keyval)
m_control = bool(event.state & gtk.gdk.CONTROL_MASK)
m_shift = bool(event.state & gtk.gdk.SHIFT_MASK)
m_alt = bool(event.state & gtk.gdk.MOD1_MASK)
m_super = bool(event.state & gtk.gdk.SUPER_MASK)
debug_print(keyname)
if keyname == 'Escape' :
self.get_parent().unfullscreen()
self.get_parent().hide()
self.presentation.grab_focus()
return True
return GlosserBasic.do_key_release_event(self, event)
def __init__(self, slides=None, template=None, complete_widgets_list=None, presentation=None):
self.presentation = presentation
layout = GlosserLayout(presentation=GLOSSER_PRESENTATION)
GlosserBasic.__init__(self, layout=layout, template=template, slides=slides,
complete_widgets_list=complete_widgets_list)
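# Glosser is the design and editing view. On top of GlosserBasic it adds the
# widget toolbar, the spronk strip, the slide list, aobject property panels
# and an embedded GlosserPresentation instance used for fullscreen delivery
# and PDF export.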
class Glosser(GlosserBasic, aobject.AObject) :
current_widget = None
#def change_current_line(self, n=None) :
# ret = GlosserBasic.change_current_line(self, n=n)
# if not ret :
# return False
# if n < len(self.current_slide.lines) :
# self.change_current_widget(self.current_slide.lines[n][1])
# self.redraw()
from_spronk = None
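    # Rebuild the spronk strip list store for the current slide: an opening
    # row, an 'Initial' row, one row per spronk and a closing row. Each row's
    # active flag records whether the currently selected widget is visible at
    # that point in the build sequence.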
def update_spronk_strip(self) :
if self.current_slide is None :
return
slide = self.current_slide
self.spronk_buffer.clear()
w = self.current_widget
cwh = w is None or w.initial_hide
self.spronk_buffer.insert(0, (GLOSSER_SPRONK_CELL_OPEN, not cwh, u'\u21c8'))
self.spronk_buffer.insert(1, (GLOSSER_SPRONK_CELL_INITIAL, not cwh, 'Initial'))
l = 2
for spronk in slide.spronks :
colour = '#BBBBBB'
for action_pair in spronk :
action = action_pair[0]
if w and action == w.show :
cwh = False
if w and action == w.hide :
cwh = True
if not cwh :
colour = '#FFAA00'
text = ''
ty = GLOSSER_SPRONK_CELL_NORMAL if len(slide.spronks)+1 > l else \
GLOSSER_SPRONK_CELL_FINAL
self.spronk_buffer.insert(l, (ty, not cwh, text))
l += 1
post_colour = '#DDDDDD'
if not cwh :
post_colour = '#FFDD00'
self.spronk_buffer.insert(l, (GLOSSER_SPRONK_CELL_CLOSE, not cwh, u'\u21ca'))
self.slide_list_trvw.queue_draw()
def change_current_line(self, n=None) :
if self.current_slide is None :
self.current_line = 0
return False
if n is None :
n = self.current_line
if n > len(self.current_slide.lines) :
n = len(self.current_slide.lines)
elif n < 0 :
n = 0
self.current_line = n
if self.get_current_widget_in_line() is None :
self.change_current_widget(None)
elif self.get_current_widget_in_line() == -1 :
self.change_current_widget(self.current_slide.lines[n][1])
self.redraw()
def get_current_widget_in_line(self) :
if self.current_slide is None :
return None
if 0 > self.current_line or len(self.current_slide.lines) <= self.current_line:
return None
if self.current_widget == self.current_slide.lines[self.current_line][1] :
return -1
if not self.current_slide.lines[self.current_line][1].container :
return -1
if self.current_widget not in self.current_slide.lines[self.current_line][1].contained:
return -1
return self.current_slide.lines[self.current_line][1].contained.index(self.current_widget)
current_line = 0
def change_current_widget(self, widget) :
if widget == self.current_widget :
return
        if widget is not None and widget.slide != self.current_slide :
return
self.current_widget = widget
self.update_spronk_strip()
if widget is not None :
widget.design_widget.grab_focus()
win = widget.get_action_panel()
if win is not None and self.presentation is not None and self.get_aenv() is not None and \
self.get_aenv().action_panel is not None :
self.get_aenv().action_panel.to_action_panel(win)
def do_key_release_event(self, event):
keyname = gtk.gdk.keyval_name(event.keyval)
m_control = bool(event.state & gtk.gdk.CONTROL_MASK)
m_shift = bool(event.state & gtk.gdk.SHIFT_MASK)
m_alt = bool(event.state & gtk.gdk.MOD1_MASK)
m_super = bool(event.state & gtk.gdk.SUPER_MASK)
if m_control and not m_alt and not m_shift :
if keyname == 'Down' :
widpos = self.get_current_widget_in_line()
if widpos is None :
self.change_current_line(self.current_line+1)
else :
line_widget = self.current_slide.lines[self.current_line][1]
if not line_widget.container or widpos == len(line_widget.contained)-1 :
self.change_current_line(self.current_line+1)
else :
self.change_current_widget(line_widget.contained[widpos+1])
self.redraw()
#if self.current_line < 0 or \
# self.current_line >= len(self.current_slide.lines) :
# self.change_current_line(self.current_line+1)
#if line_widget.container and len(line_widget.contained) > 0 :
# if self.current_widget is None :
# self.current_widget = line_widget.contained[0]
# self.redraw()
# elif line_widget.contained[-1] == self.current_widget :
# self.change_current_line(self.current_line+1)
# else :
# self.current_widget = \
# line_widget.contained[1+
# line_widget.contained.index(self.current_widget)]
# self.redraw()
#else :
# self.change_current_line(self.current_line+1)
return True
elif keyname == 'Up' :
widpos = self.get_current_widget_in_line()
if widpos is None or widpos == -1 :
self.change_current_line(self.current_line-1)
else :
line_widget = self.current_slide.lines[self.current_line][1]
if widpos == 0 :
self.change_current_widget(line_widget)
self.redraw()
else :
self.change_current_widget(line_widget.contained[widpos-1])
self.redraw()
#if self.current_line < 0 or \
# self.current_line >= len(self.current_slide.lines) :
# self.change_current_line(self.current_line-1)
#line_widget = self.current_slide.lines[self.current_line][1]
#if line_widget.container and len(line_widget.contained) > 0 :
# if self.current_widget is None :
# self.change_current_line(self.current_line-1)
# elif line_widget.contained[0] == self.current_widget :
# self.current_widget = None
# self.redraw()
# else :
# self.current_widget = \
# line_widget.contained[-1+
# line_widget.contained.index(self.current_widget)]
# self.redraw()
#else :
# self.change_current_line(self.current_line-1)
#return True
elif keyname == 'BackSpace' :
self.remove_current_line()
return True
return GlosserBasic.do_key_release_event(self, event)
def remove_current_line(self) :
if self.current_line in range(0,len(self.current_slide.lines)) :
self.complete_widgets_list.remove(self.current_slide.lines[self.current_line][1])
self.current_slide.remove_line(self.current_line)
self.change_current_line()
self.set_slide()
def __del__(self) :
aobject.AObject.__del__(self)
#PROPERTIES
def get_auto_aesthete_properties(self):
return {
'conference' : (str,(aobject.AOBJECT_CAN_NONE,)),
'date' : (str,(aobject.AOBJECT_CAN_NONE,)),
'presenter' : (str,(aobject.AOBJECT_CAN_NONE,)),
'institute' : (str,(aobject.AOBJECT_CAN_NONE,)),
'template' : (str,),
}
#BEGIN PROPERTIES FUNCTIONS
def get_template(self, val=None) :
return self.template['name'] if self.template is not None else ''
def change_template(self, val) :
if val == '' :
val = None
self.set_template(val)
def change_date(self, val) :
if val == '' :
self.date = None
else :
self.date = val
self.presentation_instance.date = val
self.set_slide()
def change_presenter(self, val) :
if val == '' :
self.presenter = None
else :
self.presenter = val
self.presentation_instance.presenter = val
self.set_slide()
def change_institute(self, val) :
if val == '' :
self.institute = None
else :
self.institute = val
self.presentation_instance.institute = val
self.set_slide()
def change_conference(self, val) :
if val == '' :
self.conference = None
else :
self.conference = val
self.presentation_instance.conference = val
self.set_slide()
#END PROPERTIES FUNCTIONS
def make_action_panel(self) :
win = gtk.VBox()
line_table_maker = PreferencesTableMaker()
line_table_maker.append_row("Conf",
self.aes_method_entry_update("conference"))
line_table_maker.append_row("Presenter",
self.aes_method_entry_update("presenter"))
line_table_maker.append_row("Date",
self.aes_method_entry_update("date"))
line_table_maker.append_row("Institute",
self.aes_method_entry_update("institute"))
template_cmbo = gtk.ComboBox(template_lsto)
template_crtx = gtk.CellRendererText()
template_cmbo.pack_start(template_crtx, True)
template_cmbo.add_attribute(template_crtx, 'text', 1)
line_table_maker.append_row("Template", template_cmbo)
self.aes_method_automate_combo(template_cmbo, 'template', 0)
win.aes_title = "Presentation"
win.pack_start(line_table_maker.make_table())
win.show_all()
return win
def __init__(self, slides=None, complete_widgets_list=None, env=None):
self.slide_list = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_PYOBJECT)
self.variant_lsto = gtk.ListStore(str, str)
self.template = {'name':self.default_template}
if complete_widgets_list is None :
complete_widgets_list = []
self.complete_widgets_list = complete_widgets_list
if slides is None :
self.slides = []
else :
self.slides = slides
self.layout = GlosserLayout(presentation=GLOSSER_DESIGN)
self.presentation_instance = GlosserPresentation(slides=self.slides,
template=self.template,
complete_widgets_list=complete_widgets_list,
presentation=self)
self.presentation_window = gtk.Window()
self.presentation_window.add(self.presentation_instance)
self.presentation_instance.show_all()
self.presentation_window.hide()
gtk.VBox.__init__(self)
aobject.AObject.__init__(self, name_root="Glosser", env=env,
view_object=True,
elevate=True)
self.connect_after("size-allocate", self.do_widget_pos_update)
self.add_events(gtk.gdk.KEY_RELEASE_MASK)
self.presentation_window.connect_after("show", lambda w : (w.fullscreen(), w.grab_focus()))
self.layout.show_decoration = True
self.set_template()
#self.new_slide(0, 'basic')
toolbar = gtk.Toolbar()
present_tbut = gtk.ToolButton('aes-glosser')
present_tbut.connect("clicked", lambda w :(\
self.presentation_instance.go_first(),
self.presentation_window.show()))
toolbar.insert(present_tbut, -1)
pdf_tbut = gtk.ToolButton(gtk.STOCK_SAVE_AS)
pdf_tbut.connect("clicked", lambda w :\
self.run_chooser())
toolbar.insert(pdf_tbut, -1)
toolbar.insert(gtk.SeparatorToolItem(), -1)
for w in widget_types :
wt = widget_types[w]
new_tbut = gtk.ToolButton(wt[1])
new_tbut.set_tooltip_text(wt[0])
new_tbut.connect('clicked', _make_lambda_iw(self, w))
toolbar.insert(new_tbut, -1)
toolbar.insert(gtk.SeparatorToolItem(), -1)
left_tbut = gtk.ToolButton(gtk.STOCK_JUSTIFY_LEFT)
left_tbut.connect("clicked", lambda w : \
self.set_current_line_alignment('left'))
toolbar.insert(left_tbut, -1)
centre_tbut = gtk.ToolButton(gtk.STOCK_JUSTIFY_CENTER)
centre_tbut.connect("clicked", lambda w : \
self.set_current_line_alignment('centre'))
toolbar.insert(centre_tbut, -1)
right_tbut = gtk.ToolButton(gtk.STOCK_JUSTIFY_RIGHT)
right_tbut.connect("clicked", lambda w : \
self.set_current_line_alignment('right'))
toolbar.insert(right_tbut, -1)
self.pack_start(toolbar, False)
self.layout_hbox = gtk.HBox()
self.spronk_buffer = gtk.ListStore(int, bool, str)
spronk_strip = gtk.TreeView(self.spronk_buffer)
spronk_strip.connect("row-activated", self.do_spronk_strip_row_activated)
spronk_strip.get_selection().connect("changed",
self.do_spronk_strip_selection_changed)
spronk_strip.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
spronk_column = gtk.TreeViewColumn('Spronk')
spronk_strip.append_column(spronk_column)
spronk_tvcr = GlosserSpronkCellRenderer()
spronk_column.pack_start(spronk_tvcr, True)
spronk_column.add_attribute(spronk_tvcr, 'special', 0)
spronk_column.add_attribute(spronk_tvcr, 'active', 1)
spronk_column.add_attribute(spronk_tvcr, 'text', 2)
self.layout_hbox.pack_start(self.layout)
self.layout_hbox.pack_start(spronk_strip, False)
self.pack_start(self.layout_hbox)
self.grab_focus()
self.set_slide(0)
self.presentation_instance.set_slide(0)
self.show_all()
self.action_panel = self.make_action_panel()
if self.get_aenv() is not None :
self.get_aenv().action_panel.to_action_panel(self.action_panel)
old_sel = None
old_old_sel = None
def do_spronk_strip_selection_changed(self, selection) :
if self.current_slide is None or self.current_widget is None :
return
self.old_old_sel = self.old_sel
self.old_sel = selection.get_selected_rows()
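    # Activating rows in the spronk strip edits the build sequence: with a
    # single row selected, the opening row inserts an empty spronk at the
    # start and the closing row appends one at the end; any other activation
    # rewrites the current widget's show/hide actions so that it is visible
    # exactly on the selected rows.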
def do_spronk_strip_row_activated(self, spronk_strip, path, column) :
if self.current_slide is None :
return
spronks = self.current_slide.spronks
if self.old_old_sel is not None :
selected_rows = self.old_old_sel[1]
else :
selected_rows = spronk_strip.get_selection().get_selected_rows()[1]
if len(selected_rows) == 1 and path == (0,) :
spronks.insert(0, [])
elif len(selected_rows) == 1 and path == (len(spronks)+2,):
spronks.insert(len(spronks), [])
elif self.current_widget is not None :
self.current_slide.remove_from_spronks(self.current_widget.show)
self.current_slide.remove_from_spronks(self.current_widget.hide)
self.current_widget.initial_hide = ((0,) not in selected_rows and \
(1,) not in selected_rows )
debug_print(self.current_widget.initial_hide)
current_hide = self.current_widget.initial_hide
for l in range(0, len(self.current_slide.spronks)) :
if (((l+2,) in selected_rows) == current_hide) :
if current_hide :
self.current_slide.add_to_spronk(l,
self.current_widget.show,
self.current_widget.hide)
current_hide = False
else :
self.current_slide.add_to_spronk(l,
self.current_widget.hide,
self.current_widget.show)
current_hide = True
self.update_spronk_strip()
debug_print(path)
def do_widget_size_allocate(self, widget, req) :
line = widget.line
line[2] = self.layout.translate_dist(req.height, rev=True)
self.redraw()
line_max = 5
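    # Create a widget of the given type on a slide (the current slide by
    # default). The new widget provides both a design subwidget and a
    # presentation subwidget; it is appended to a container widget on the
    # current line if one exists, otherwise inserted as a new line.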
def insert_widget(self, ty, slide_num=None) :
if slide_num is not None :
slide = self.slides[slide_num]
else :
slide = self.current_slide
if self.template is None or slide is None :
return
w = self.template['body']['width']
h = self.template['body']['height']
widget = widget_types[ty][2](slide,
self.layout,
self.presentation_instance.layout,
env=self.get_aenv())
for sw in widget.subwidgets :
sw.modify_bg(gtk.STATE_NORMAL,
gtk.gdk.Color(self.template['params']['bgcolor']))
self.complete_widgets_list.append(widget)
self.absorb_properties(widget, as_self=False)
design_widget = widget.subwidgets[GLOSSER_WIDGET_DESIGN]
#design_widget.set_property('width-request', int(self.layout.translate_dist(w)))
line_height = h/self.line_max
if self.current_line < len(slide.lines) and \
slide.lines[self.current_line][1].container :
cntr = slide.lines[self.current_line][1]
cntr.append(widget)
self.change_current_widget(widget)
else :
slide.insert_line(self.current_line,
ty,
widget,
line_height,
'left')
widget.line = slide.lines[self.current_line]
self.change_current_widget(widget)
widget.connect_after("redraw-request", lambda w :
self.redraw(thumb_only=True))
self.set_slide()
#self.do_widget_size_allocate(widget, design_widget.allocation)
#self.do_widget_pos_update(self, None)
design_widget.grab_focus()
return widget
def new_slide(self, num, variant) :
num = int(num)
self.slides.insert(num, GlosserSlide(num,
variant=variant,
reload_slide=self.set_slide,
variant_lsto=self.variant_lsto,
env=self.get_aenv()))
self.absorb_properties(self.slides[num], as_self=False)
self.slide_list.append((num+1, self.slides[num],))
self.slides[num].reload()
self.set_slide(num)
self.presentation_instance.set_slide(num)
return self.slides[num]
def set_slide(self, n=None, change_current=True, xml=False) :
self.presentation_instance.set_slide(n,
change_current=change_current)
GlosserBasic.set_slide(self, n=n, change_current=change_current)
if change_current :
self.update_spronk_strip()
self.change_current_line()
def run_chooser(self, env = None) :
chooser = gtk.FileChooserDialog(title="Export PDF",
parent=self.get_toplevel(),
action=gtk.FILE_CHOOSER_ACTION_SAVE,
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK))
chooser.set_default_response(gtk.RESPONSE_OK)
resp = chooser.run()
if resp == gtk.RESPONSE_OK :
filename = chooser.get_filename()
chooser.destroy()
else :
chooser.destroy()
return
self.render_to_pdf(filename)
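    # Export the presentation to PDF: for every slide, render the template SVG
    # plus the slide's widgets once per spronk state, emitting one PDF page
    # per state.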
def render_to_pdf(self, filename) :
pdfsurface = cairo.PDFSurface(filename,
int(self.template['width']),
int(self.template['height']))
pdf_cr = cairo.Context(pdfsurface)
for slide in self.slides :
self.set_template_variant_xml(slide.variant)
xml = copy.deepcopy(self.current_xml)
self.presentation_instance.set_slide(n=self.slides.index(slide),
xml=xml,
change_current=False)
slide.pre_spronk()
for n in range(0, len(slide.spronks)+1) :
pdf_cr.save()
svg = StringIO.StringIO()
xml.write(svg)
handle = rsvg.Handle()
handle.write(svg.getvalue())
handle.close()
handle.render_cairo(pdf_cr)
pdf_cr.translate(self.template["body"]["x"],
self.template["body"]["y"])
slide.render_widgets_to_cairo(pdf_cr, ignore_spronks=False)
pdf_cr.restore()
pdf_cr.show_page()
if n < len(slide.spronks) :
slide.execute_spronk(n)
thumb_rat = .2
def redraw(self, slide=None, xml=None, change_current=True, thumb_only=False) :
if not thumb_only :
self.presentation_instance.redraw(slide, xml, change_current=False)
if slide is None :
slide = self.current_slide
if xml is None :
self.set_template_variant_xml(slide.variant)
xml = self.current_xml
if xml is not None and \
slide is not None :
svg = StringIO.StringIO()
xml.write(svg)
handle = rsvg.Handle()
handle.write(svg.getvalue())
handle.close()
thumb_rat = self.thumb_rat
thumb = cairo.ImageSurface(cairo.FORMAT_ARGB32,
int(self.template['width']*thumb_rat),
int(self.template['height']*thumb_rat))
thumb_cr = cairo.Context(thumb)
thumb_cr.scale(thumb_rat, thumb_rat)
handle.render_cairo(thumb_cr)
thumb_cr.translate(self.template["body"]["x"],
self.template["body"]["y"])
slide.render_widgets_to_cairo(thumb_cr)
thumb.flush()
slide.set_thumb(thumb)
if change_current and not thumb_only :
self.layout.width = self.template['width']
self.layout.height = self.template['height']
self.layout.handle = handle
self.layout.line_num = self.current_line
self.layout.lines = slide.lines
self.layout.current_widget = self.current_widget
if self.current_line < len(slide.lines) :
ty = slide.lines[self.current_line][0]
self.layout.line_indicators = \
[ widget_types[ty][3](self.get_style())]
                    if self.current_widget is not None and \
                            self.current_widget != slide.lines[self.current_line][1]:
ty = self.current_widget.ty
self.layout.line_indicators.append(widget_types[ty][3](self.get_style()))
else :
self.layout.line_indicators = ( u'\u273c', )
self.layout.body_x = self.template['body']['x']
self.layout.body_y = self.template['body']['y']
self.layout.body_w = self.template['body']['width']
self.layout.body_h = self.template['body']['height']
self.slide_list_trvw.queue_draw()
self.layout.queue_draw()
def _slide_to_action_panel(self) :
if self.current_slide is None :
return
self.get_aenv().action_panel.to_action_panel(self.current_slide.action_panel)
def _remake_slide_menu(self) :
children = self.new_slide_menu.get_children()
for r in range(1, len(children)) :
self.new_slide_menu.remove(children[r])
if self.template is None or 'variants' not in self.template :
return
for variant in self.template['variants'] :
variant_settings = self.template['variants'][variant]
new_variant_butt = gtk.MenuItem()
new_variant_butt.add(self.template['variants'][variant]['sample'])
new_variant_butt.set_tooltip_text(variant_settings['title'])
new_variant_butt.connect('activate',
variant_settings['new_slide'])
self.new_slide_menu.append(new_variant_butt)
self.new_slide_menu.show_all()
def aes_add_a(self, aname_root, **parameters) :
if aname_root == 'GlosserSlide' :
return self.new_slide(parameters['num'], 'basic')
elif aname_root in aname_root_to_ty :
return self.insert_widget(aname_root_to_ty[aname_root],
slide_num=int(parameters["on_slide"]))
return aobject.AObject.aes_add_a(self, aname_root, **parameters)
def get_method_window(self) :
vbox = gtk.VBox()
butt_hbox = gtk.HBox()
new_slide_butt = gtk.ToggleButton()
new_slide_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_NEW,
gtk.ICON_SIZE_BUTTON))
new_slide_butt.set_tooltip_text("New slide")
new_slide_menu = gtk.Menu()
new_slide_menu.attach_to_widget(new_slide_butt, None)
new_slide_butt.connect("button_press_event", lambda w, e :\
new_slide_menu.popup(None, None, None, e.button, e.time) \
if not w.get_active() else \
new_slide_menu.popdown())
new_slide_menu.set_title('New Slide')
self.new_slide_menu = new_slide_menu
new_slide_menu.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color(65535, 65535, 65535))
new_slide_labl = gtk.Label()
new_slide_labl.set_markup('<b>New...</b>')
new_slide_meni = gtk.MenuItem()
new_slide_meni.add(new_slide_labl)
new_slide_menu.append(new_slide_meni)
self._remake_slide_menu()
settings_butt = gtk.Button()
settings_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_PREFERENCES,
gtk.ICON_SIZE_BUTTON))
slide_settings_butt = gtk.Button()
slide_settings_butt.set_image(gtk.image_new_from_stock(gtk.STOCK_PAGE_SETUP,
gtk.ICON_SIZE_BUTTON))
if self.get_aenv() is not None and \
self.get_aenv().action_panel is not None :
settings_butt.connect('clicked', lambda b : \
self.get_aenv().action_panel.to_action_panel(self.action_panel))
slide_settings_butt.connect('clicked', lambda b : \
self._slide_to_action_panel())
else :
win = gtk.Window()
            win.add(self.action_panel)
win.hide()
settings_butt.connect('clicked', lambda b : win.show())
butt_hbox.pack_start(settings_butt, False)
butt_hbox.pack_start(slide_settings_butt, False)
butt_hbox.pack_start(new_slide_butt, False)
vbox.pack_start(butt_hbox, False)
vbox.pack_start(gtk.Label('Slides'), False)
slide_list_trvw = gtk.TreeView(self.slide_list)
slide_list_cllr = GlosserSlideCellRenderer()
slide_list_num_cllr = gtk.CellRendererText()
slide_list_num_tvcl = gtk.TreeViewColumn('#', slide_list_num_cllr)
slide_list_num_tvcl.add_attribute(slide_list_num_cllr, 'text', 0)
slide_list_tvcl = gtk.TreeViewColumn('Slide', slide_list_cllr)
slide_list_tvcl.add_attribute(slide_list_cllr, 'slide', 1)
slide_list_trvw.append_column(slide_list_num_tvcl)
slide_list_trvw.append_column(slide_list_tvcl)
slide_list_scrw = gtk.ScrolledWindow()
slide_list_scrw.add_with_viewport(slide_list_trvw)
slide_list_scrw.set_size_request(-1, 300)
self.slide_list_trvw = slide_list_trvw
slide_list_trvw.connect('row-activated', self._set_slide_from_trvw)
vbox.pack_start(slide_list_scrw)
vbox.show_all()
return vbox
def _set_slide_from_trvw(self, tv, pa, col) :
it = self.slide_list.get_iter(pa)
self.set_slide(self.slide_list.get_value(it, 0)-1)
def _make_new_slide_lambda(self, variant) :
return lambda c: self.new_slide(len(self.slides), variant)
def set_template(self, template_name=None, variant='basic') :
if template_name is None :
template_name = self.default_template
self.template['name'] = template_name
self.template['tar'] = tarfile.open(\
get_share_location()+'templates/glosser/'+template_name+'.tgz', 'r')
self.template['params_xml'] = \
etree.parse(self.template['tar'].extractfile('parameters.xml'))
self.template['params'] = {}
params = ['bgcolor']
root = self.template['params_xml'].getroot()
param_root = root.find('params')
for p in params :
node = param_root.find(p)
self.template['params'][p] = node.text.strip()
self.variant_lsto.clear()
variants_root = root.find('variants')
self.template['variants'] = {}
for variant_node in variants_root :
self.template['variants'][variant_node.tag] = {
'title' : variant_node.get('title'),
'filename' : variant_node.text.strip(),
'new_slide' : self._make_new_slide_lambda(variant_node.tag),
}
var_dict = self.template['variants'][variant_node.tag]
fobj = var_dict['filename']
fobj = self.template['tar'].extractfile(fobj)
file_contents = fobj.read()
var_dict['xml'] =\
etree.ElementTree(etree.fromstring(file_contents))
loader = gtk.gdk.PixbufLoader('svg')
loader.write(file_contents)
loader.set_size(50, 50)
loader.close()
var_dict['sample'] =\
gtk.image_new_from_pixbuf(loader.get_pixbuf())
self.variant_lsto.append((variant_node.tag,
var_dict['title']))
if not self.presentation :
self._remake_slide_menu()
basic_xml = self.template['variants']['basic']['xml']
w = basic_xml.getroot().get('width')
self.template['width'] = _px_to_float(w)
h = basic_xml.getroot().get('height')
self.template['height'] = _px_to_float(h)
self.set_template_variant_xml(variant, force=True)
for w in self.complete_widgets_list :
for sw in w.subwidgets :
sw.modify_bg(gtk.STATE_NORMAL,
gtk.gdk.Color(self.template['params']['bgcolor']))
# Redraw thumbs
for n in range(0, len(self.slides)) :
self.set_slide(n, change_current=False)
# Update slide in view
self.set_slide()
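# Module-level scan of the shared template directory: every *.tgz archive is
# treated as a Glosser template and its parameters.xml is read to populate the
# template chooser list store.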
all_templates = {}
template_files = os.listdir(get_share_location()+'templates/glosser/')
template_lsto = gtk.ListStore(str, str, str)
for t in template_files :
if t.endswith('.tgz') :
template_name = t[:-4]
all_templates[template_name] = {}
tar = tarfile.open(\
get_share_location()+'templates/glosser/'+template_name+'.tgz', 'r')
params_xml = \
etree.parse(tar.extractfile('parameters.xml'))
root = params_xml.getroot()
param_root = root.find('params')
for node in param_root :
all_templates[template_name][node.tag] = node.text.strip()
template_lsto.append((template_name,
all_templates[template_name]['title'],
all_templates[template_name]['description'],))
aobject.aname_root_catalog['Glosser'] = Glosser
# --- end of aesthete/glosser/Glosser.py ---
import gtk
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
import rsvg
import StringIO
def render_stock(style, stockid) :
icon_set = style.lookup_icon_set(stockid)
pixbuf = icon_set.render_icon(style,
gtk.TEXT_DIR_NONE,
gtk.STATE_NORMAL,
gtk.ICON_SIZE_SMALL_TOOLBAR,
None,
None)
return pixbuf
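# GlosserWidget pairs two GTK subwidgets showing the same content: one placed
# in the design layout (editable) and one in the presentation layout, kept in
# sync in position, size and scaling. Its x, y, w and h are stored in template
# body coordinates and translated per layout.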
class GlosserWidget(aobject.AObject) :
x = 0
y = 0
h = 0
w = 0
design_widget = None
presentation_widget = None
__gsignals__ = {
"redraw-request" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, () )}
current_scaling = 1.
attached = True
suspend = False
visible = True
container = False
initial_spronk_fns = None
initial_hide = False
def get_auto_aesthete_properties(self) :
return {
'x' : (float,), 'y' : (float,), 'h' : (float,), 'w' : (float,),
}
def aes_get_parameters(self) :
return { 'on_slide' : self.slide.num }
def initial_spronk(self) :
debug_print(self.initial_hide)
if self.initial_hide :
self.hide()
for fn in self.initial_spronk_fns :
fn()
def show(self) :
self.presentation_widget.show()
def hide(self) :
self.presentation_widget.hide()
def get_visible(self) :
return self.visible
def remove_from_layouts(self) :
if not self.attached :
return
self.attached = False
for i in (0,1) :
self.layouts[i].remove(self.subwidgets[i])
def restore_to_layouts(self) :
if self.attached :
return
for i in (0,1) :
to_pos = map(int, self.layouts[i].translate_body_pos(self.x,
self.y))
self.layouts[i].put(self.subwidgets[i], *to_pos)
self.attached = True
def __init__(self, slide, design_layout, presentation_layout, name_root='GlosserWidget', env=None) :
aobject.AObject.__init__(self, name_root=name_root,
env=env,
view_object=False,
elevate=False)
self.slide = slide
self.current_scaling = [1.,1.]
self.layout_conn = [-1, -1]
self.initial_spronk_fns = []
#[presentation_layout.translate_dist(1.),
# design_layout.translate_dist(1.)]
#[presentation_layout.translate_pos(presentation_layout.body_x,presentation_layout.body_y,rev=True),
# design_layout.translate_pos(design_layout.body_x,design_layout.body_y,rev=True)]
self.layouts = [presentation_layout, design_layout]
self.design_widget.connect_after("expose-event", lambda w, e :
self.presentation_widget.queue_draw())
self.subwidgets = [self.presentation_widget, self.design_widget]
for i in (0, 1) :
self.layout_conn[i] = self.layouts[i].connect_after("size-allocate",
self.do_layout_size_allocate, i)
self.layouts[i].put(self.subwidgets[i], 0, 0)
self.subwidgets[i].show()
self.move(0, 0, i)
self.design_widget.connect("size-allocate", lambda w, a :
self.update_from_design_widget())
def move(self, x=None, y=None, subwidget=None) :
if self.suspend :
return
self.suspend = True
if x is None :
x = self.x
if y is None :
y = self.y
self.x = x
self.y = y
do_redraw = False
for i in (subwidget,) if subwidget is not None else (1,0) :
sw = self.subwidgets[i]
self.layouts[i].handler_block(self.layout_conn[i])
x, y = map(int, self.layouts[i].translate_body_pos(self.x,self.y))
if self.attached and (x != sw.allocation.x or y != sw.allocation.y) :
self.layouts[i].move(sw, x, y)
do_redraw = True
self.layouts[i].handler_unblock(self.layout_conn[i])
self.suspend = False
if do_redraw :
self.emit("redraw-request")
def update_from_design_widget(self) :
if self.suspend :
return
al = self.design_widget.get_allocation()
layout = self.layouts[GLOSSER_WIDGET_DESIGN]
if al.x > 0 or al.y > 0 :
x, y = layout.translate_body_pos(al.x, al.y, rev=True)
self.move(x, y, subwidget=GLOSSER_WIDGET_PRESENTATION)
if al.width > 0 or al.height > 0:
w = layout.translate_dist(al.width, rev=True)
h = layout.translate_dist(al.height, rev=True)
self.resize(w, h, subwidget=GLOSSER_WIDGET_PRESENTATION)
def move_resize(self, subwidget=None) :
self.resize(self.w, self.h, subwidget=subwidget)
self.move(self.x, self.y, subwidget=subwidget)
def resize(self, w=None, h=None, subwidget=None) :
if self.suspend :
return
self.suspend = True
if w is None :
w = self.w
if h is None :
h = self.h
self.w = w
self.h = h
do_redraw = False
for i in (subwidget,) if subwidget is not None else (1,0) :
self.layouts[i].handler_block(self.layout_conn[i])
sw = self.subwidgets[i]
w = self.layouts[i].translate_dist(self.w)
h = self.layouts[i].translate_dist(self.h)
if int(w) != sw.allocation.width or int(h) != sw.allocation.height :
sw.set_size_request(int(w), int(h))
do_redraw = True
self.layouts[i].handler_unblock(self.layout_conn[i])
if do_redraw :
self.emit("redraw-request")
self.suspend = False
old_origin = None
old_rat = None
def do_layout_size_allocate(self, layout, allocation, subwidget) :
origin = layout.translate_body_pos(0, 0)
rat = layout.translate_dist(1)
if origin == self.old_origin and rat == self.old_rat :
return
self.old_origin = origin
self.old_rat = rat
if rat != self.current_scaling[subwidget] :
self.rescale(subwidget, rat)
else :
self.move_resize(subwidget)
def rescale_action(self, subwidget, rat) :
pass
def rescale(self, subwidget, rat) :
if self.suspend :
return
self.current_scaling[subwidget] = rat
self.move_resize(subwidget)
self.rescale_action(subwidget, rat)
def get_action_panel(self) :
return None
def presentation_draw(self, cr, scaling=None, ignore_spronks=True,
final=None) :
if not ignore_spronks and not self.presentation_widget.get_visible() :
return False
if final is None :
self.do_presentation_draw(cr, scaling=scaling)
else :
self.do_presentation_draw(cr, scaling=scaling, final=final)
return True
class GlosserPresentationImage(gtk.DrawingArea) :
scaling = 1
draw_fn = None
__gsignals__ = { "expose-event" : "override"}
def __init__(self, draw_fn) :
self.draw_fn = draw_fn
gtk.DrawingArea.__init__(self)
def rescale(self, rat) :
self.scaling = rat
self.queue_draw()
def do_expose_event(self, event):
cr = self.window.cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
cr.scale(self.scaling, self.scaling)
self.draw_fn(cr, self.scaling, final=True)
GLOSSER_WIDGET_PRESENTATION = 0
GLOSSER_WIDGET_DESIGN = 1
# --- end of aesthete/glosser/GlosserWidget.py ---
import gtk
from matplotlib.backends.backend_cairo import RendererCairo
import pangocairo
from aobject.utils import *
import pango
import gobject
from .. import glypher
import copy
from lxml import etree
import cairo
from aobject import aobject
from aobject.paths import *
from ..tablemaker import *
import rsvg
import StringIO
from GlosserWidget import *
#class GlosserTextView_via_widget(gtk.TextView, GlosserWidget) :
# initial_font_size = 25
# def __init__(self, env=None) :
# self.design_widget = self
# self.presentation_widget =\
# GlosserPresentationWidget(pres_text_view,
# self.allocation.x,
# self.allocation.y,
# self.allocation.width,
# self.allocation.height)
# gtk.TextView.__init__(self)
# GlosserWidget.__init__(self, name_root='GlosserTextView',
# env=env)
# self.set_wrap_mode(gtk.WRAP_WORD_CHAR)
# pres_text_view = gtk.TextView(self.get_buffer())
# pres_text_view.modify_font(pango.FontDescription(\
# "Linux Libertine %d"%int(self.initial_font_size)))
# self.get_buffer().connect_after("changed", lambda w :\
# self.emit("redraw-request"))
# def rescale(self, rat) :
# GlosserWidget.rescale(self, rat)
# self.modify_font(pango.FontDescription(\
# "Linux Libertine %d"%int(self.initial_font_size*rat)))
# def presentation_draw(self, cr, scaling=None) :
# return self.presentation_widget.draw(cr, scaling)
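# GlosserTextView edits text in a gtk.TextView on the design layout and draws
# the same text buffer with pango/cairo on the presentation layout.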
class GlosserTextView(GlosserWidget) :
ty = 'textview'
initial_font_size = 25
def get_auto_aesthete_properties(self) :
d = GlosserWidget.get_auto_aesthete_properties(self)
d.update({'text' : (str,), 'font' : (str,)})
return d
def get_font(self) :
return self.font_desc.to_string()
def make_action_panel(self) :
textview_table_maker = PreferencesTableMaker()
textview_table_maker.append_row("Font",
self.aes_method_font_button('font'))
win = textview_table_maker.make_table()
        win.aes_title = "Text View"
win.show_all()
return win
def change_font(self, val) :
self.font_desc = pango.FontDescription(val)
self.rescale_action(GLOSSER_WIDGET_DESIGN,
self.current_scaling[GLOSSER_WIDGET_DESIGN])
self.presentation_widget.font_desc = self.font_desc
self.presentation_widget.queue_draw()
def __init__(self, slide, design_layout, presentation_layout, env=None) :
self.font_desc = pango.FontDescription(\
"Linux Libertine %d"%int(self.initial_font_size))
self.design_widget = gtk.TextView()
#self.design_widget.set_wrap_mode(gtk.WRAP_WORD_CHAR)
self.presentation_widget =\
GlosserPresentationTextView(self.design_widget.get_buffer(),
self.font_desc,
self.design_widget.allocation.x,
self.design_widget.allocation.y,
self.design_widget.allocation.width,
self.design_widget.allocation.height)
GlosserWidget.__init__(self,
slide,
design_layout,
presentation_layout,
name_root='GlosserTextView',
env=env)
self.design_widget.get_buffer().connect_after("changed", lambda w :\
self.emit("redraw-request"))
self.action_panel = self.make_action_panel()
self.design_widget.connect("focus-in-event", lambda w, e :
self.get_aenv().action_panel.to_action_panel(self.action_panel))
def get_text(self) :
w = self.design_widget.get_buffer()
return w.get_text(w.get_start_iter(), w.get_end_iter())
def change_text(self, val) :
self.design_widget.get_buffer().set_text(val)
def rescale_action(self, subwidget, rat) :
debug_print(self.initial_font_size*rat)
if subwidget == GLOSSER_WIDGET_PRESENTATION :
self.subwidgets[subwidget].rescale(rat)
elif subwidget == GLOSSER_WIDGET_DESIGN :
fd = pango.FontDescription(self.font_desc.to_string())
fd.set_size(int(fd.get_size()*rat))
self.subwidgets[subwidget].modify_font(fd)
def do_presentation_draw(self, cr, scaling=None) :
return self.presentation_widget.draw(cr, scaling)
class GlosserPresentationTextView(gtk.DrawingArea) :
x = 0
y = 0
__gsignals__ = { "expose-event" : "override" }
scaling = 1
draw_fn = None
def __init__(self, text_buffer, font_desc, x, y, w, h) :
self.text_buffer = text_buffer
self.font_desc = font_desc
gtk.DrawingArea.__init__(self)
self.x = x
self.y = y
self.w = w
self.h = h
self.rescale(1)
def rescale(self, rat=None) :
if rat is None :
rat = self.scaling
self.scaling = rat
def do_expose_event(self, event):
cr = self.window.cairo_create()
cr.rectangle ( event.area.x, event.area.y, event.area.width, event.area.height)
cr.clip()
self.draw(cr, self.scaling)
def draw(self, cr, scaling) :
cr.scale(scaling, scaling)
cr.move_to(0, 0)
pcr = pangocairo.CairoContext(cr)
layout = pcr.create_layout()
layout.set_font_description(self.font_desc)
layout.set_wrap(pango.WRAP_WORD_CHAR)
s = self.text_buffer.get_start_iter()
e = self.text_buffer.get_end_iter()
layout.set_text(self.text_buffer.get_text(s, e))
        pcr.show_layout(layout)
# --- end of aesthete/glosser/TextView.py ---
import gtk
import re
import sys
import os
import pango
from StringIO import StringIO
try:
import IPython
except Exception,e:
raise RuntimeError("Error importing IPython (%s)" % str(e))
ansi_colors = {'0;30': 'Black',
'0;31': 'Red',
'0;32': 'Green',
'0;33': 'Brown',
'0;34': 'Blue',
'0;35': 'Purple',
'0;36': 'Cyan',
'0;37': 'LightGray',
'1;30': 'DarkGray',
'1;31': 'DarkRed',
'1;32': 'SeaGreen',
'1;33': 'Yellow',
'1;34': 'LightBlue',
'1;35': 'MediumPurple',
'1;36': 'LightCyan',
'1;37': 'White'}
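# IterableIPShell wraps an embedded IPython shell so that a GTK widget can
# drive it: input is supplied through input_func and execute() processes one
# line at a time, with output captured via the supplied streams.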
class IterableIPShell:
def __init__(self,argv=None,user_ns=None,user_global_ns=None,
cin=None, cout=None,cerr=None, input_func=None):
if input_func:
IPython.iplib.raw_input_original = input_func
if cin:
IPython.Shell.Term.cin = cin
if cout:
IPython.Shell.Term.cout = cout
if cerr:
IPython.Shell.Term.cerr = cerr
if argv is None:
argv=[]
# This is to get rid of the blockage that occurs during
# IPython.Shell.InteractiveShell.user_setup()
IPython.iplib.raw_input = lambda x: None
self.term = IPython.genutils.IOTerm(cin=cin, cout=cout, cerr=cerr)
os.environ['TERM'] = 'dumb'
excepthook = sys.excepthook
self.IP = IPython.Shell.make_IPython(argv,user_ns=user_ns,
user_global_ns=user_global_ns,
embedded=True,
shell_class=IPython.Shell.InteractiveShell)
self.IP.system = lambda cmd: self.shell(self.IP.var_expand(cmd),
header='IPython system call: ',
verbose=self.IP.rc.system_verbose)
sys.excepthook = excepthook
self.iter_more = 0
self.history_level = 0
self.complete_sep = re.compile('[\s\{\}\[\]\(\)]')
def execute(self):
self.history_level = 0
orig_stdout = sys.stdout
sys.stdout = IPython.Shell.Term.cout
try:
line = self.IP.raw_input(None, self.iter_more)
if self.IP.autoindent:
self.IP.readline_startup_hook(None)
except KeyboardInterrupt:
self.IP.write('\nKeyboardInterrupt\n')
self.IP.resetbuffer()
# keep cache in sync with the prompt counter:
self.IP.outputcache.prompt_count -= 1
if self.IP.autoindent:
self.IP.indent_current_nsp = 0
self.iter_more = 0
except:
self.IP.showtraceback()
else:
self.iter_more = self.IP.push(line)
if (self.IP.SyntaxTB.last_syntax_error and
self.IP.rc.autoedit_syntax):
self.IP.edit_syntax_error()
if self.iter_more:
self.prompt = str(self.IP.outputcache.prompt2).strip()
if self.IP.autoindent:
self.IP.readline_startup_hook(self.IP.pre_readline)
else:
self.prompt = str(self.IP.outputcache.prompt1).strip()
sys.stdout = orig_stdout
def historyBack(self):
self.history_level -= 1
return self._getHistory()
def historyForward(self):
self.history_level += 1
return self._getHistory()
def _getHistory(self):
try:
rv = self.IP.user_ns['In'][self.history_level].strip('\n')
except IndexError:
self.history_level = 0
rv = ''
return rv
def updateNamespace(self, ns_dict):
self.IP.user_ns.update(ns_dict)
def complete(self, line):
split_line = self.complete_sep.split(line)
possibilities = self.IP.complete(split_line[-1])
if possibilities:
common_prefix = reduce(self._commonPrefix, possibilities)
completed = line[:-len(split_line[-1])]+common_prefix
else:
completed = line
return completed, possibilities
def _commonPrefix(self, str1, str2):
for i in range(len(str1)):
if not str2.startswith(str1[:i+1]):
return str1[:i]
return str1
def shell(self, cmd,verbose=0,debug=0,header=''):
stat = 0
if verbose or debug: print header+cmd
# flush stdout so we don't mangle python's buffering
if not debug:
input, output = os.popen4(cmd)
print output.read()
output.close()
input.close()
class ConsoleView(gtk.TextView):
def __init__(self):
gtk.TextView.__init__(self)
self.modify_font(pango.FontDescription('Mono'))
self.set_cursor_visible(True)
self.text_buffer = self.get_buffer()
self.mark = self.text_buffer.create_mark('scroll_mark',
self.text_buffer.get_end_iter(),
False)
for code in ansi_colors:
self.text_buffer.create_tag(code,
foreground=ansi_colors[code],
weight=700)
self.text_buffer.create_tag('0')
self.text_buffer.create_tag('notouch', editable=False)
self.color_pat = re.compile('\x01?\x1b\[(.*?)m\x02?')
self.line_start = \
self.text_buffer.create_mark('line_start',
self.text_buffer.get_end_iter(), True
)
self.connect('key-press-event', self._onKeypress)
self.last_cursor_pos = 0
def write(self, text, editable=False):
segments = self.color_pat.split(text)
segment = segments.pop(0)
start_mark = self.text_buffer.create_mark(None,
self.text_buffer.get_end_iter(),
True)
self.text_buffer.insert(self.text_buffer.get_end_iter(), segment)
if segments:
ansi_tags = self.color_pat.findall(text)
for tag in ansi_tags:
i = segments.index(tag)
self.text_buffer.insert_with_tags_by_name(self.text_buffer.get_end_iter(),
segments[i+1], tag)
segments.pop(i)
if not editable:
self.text_buffer.apply_tag_by_name('notouch',
self.text_buffer.get_iter_at_mark(start_mark),
self.text_buffer.get_end_iter())
self.text_buffer.delete_mark(start_mark)
self.scroll_mark_onscreen(self.mark)
def showPrompt(self, prompt):
self.write(prompt)
self.text_buffer.move_mark(self.line_start,self.text_buffer.get_end_iter())
def changeLine(self, text):
iter = self.text_buffer.get_iter_at_mark(self.line_start)
iter.forward_to_line_end()
self.text_buffer.delete(self.text_buffer.get_iter_at_mark(self.line_start), iter)
self.write(text, True)
def getCurrentLine(self):
rv = self.text_buffer.get_slice(self.text_buffer.get_iter_at_mark(self.line_start),
self.text_buffer.get_end_iter(), False)
return rv
def showReturned(self, text):
iter = self.text_buffer.get_iter_at_mark(self.line_start)
iter.forward_to_line_end()
self.text_buffer.apply_tag_by_name('notouch',
self.text_buffer.get_iter_at_mark(self.line_start),
iter)
self.write('\n'+text)
if text:
self.write('\n')
self.showPrompt(self.prompt)
self.text_buffer.move_mark(self.line_start,self.text_buffer.get_end_iter())
self.text_buffer.place_cursor(self.text_buffer.get_end_iter())
def _onKeypress(self, obj, event):
if not event.string:
return
insert_mark = self.text_buffer.get_insert()
insert_iter = self.text_buffer.get_iter_at_mark(insert_mark)
selection_mark = self.text_buffer.get_selection_bound()
selection_iter = self.text_buffer.get_iter_at_mark(selection_mark)
start_iter = self.text_buffer.get_iter_at_mark(self.line_start)
if start_iter.compare(insert_iter) <= 0 and \
start_iter.compare(selection_iter) <= 0:
return
elif start_iter.compare(insert_iter) > 0 and \
start_iter.compare(selection_iter) > 0:
self.text_buffer.place_cursor(start_iter)
elif insert_iter.compare(selection_iter) < 0:
self.text_buffer.move_mark(insert_mark, start_iter)
elif insert_iter.compare(selection_iter) > 0:
self.text_buffer.move_mark(selection_mark, start_iter)
class IPythonView(ConsoleView, IterableIPShell):
def __init__(self):
ConsoleView.__init__(self)
self.cout = StringIO()
IterableIPShell.__init__(self, cout=self.cout,cerr=self.cout,
input_func=self.raw_input)
self.connect('key_press_event', self.keyPress)
self.execute()
self.cout.truncate(0)
self.showPrompt(self.prompt)
self.interrupt = False
def raw_input(self, prompt=''):
if self.interrupt:
self.interrupt = False
raise KeyboardInterrupt
return self.getCurrentLine()
def keyPress(self, widget, event):
if event.state & gtk.gdk.CONTROL_MASK and event.keyval == 99:
self.interrupt = True
self._processLine()
return True
elif event.keyval == gtk.keysyms.Return:
self._processLine()
return True
elif event.keyval == gtk.keysyms.Up:
self.changeLine(self.historyBack())
return True
elif event.keyval == gtk.keysyms.Down:
self.changeLine(self.historyForward())
return True
elif event.keyval == gtk.keysyms.Tab:
if not self.getCurrentLine().strip():
return False
completed, possibilities = self.complete(self.getCurrentLine())
if len(possibilities) > 1:
slice = self.getCurrentLine()
self.write('\n')
for symbol in possibilities:
self.write(symbol+'\n')
self.showPrompt(self.prompt)
self.changeLine(completed or slice)
return True
def _processLine(self):
self.history_pos = 0
self.execute()
rv = self.cout.getvalue()
if rv: rv = rv.strip('\n')
self.showReturned(rv)
        self.cout.truncate(0)
# --- end of aesthete/third_party/ipython_view_10.py ---
import gtk
import re
import sys
import os
import pango
from StringIO import StringIO
try:
import IPython
except Exception,e:
raise RuntimeError("Error importing IPython (%s)" % str(e))
ansi_colors = {'0;30': 'Black',
'0;31': 'Red',
'0;32': 'Green',
'0;33': 'Brown',
'0;34': 'Blue',
'0;35': 'Purple',
'0;36': 'Cyan',
'0;37': 'LightGray',
'1;30': 'DarkGray',
'1;31': 'DarkRed',
'1;32': 'SeaGreen',
'1;33': 'Yellow',
'1;34': 'LightBlue',
'1;35': 'MediumPurple',
'1;36': 'LightCyan',
'1;37': 'White'}
class IterableIPShell:
def __init__(self,argv=None,user_ns=None,user_global_ns=None,
stdin=None, stdout=None,stderr=None, input_func=None):
if input_func:
IPython.frontend.terminal.interactiveshell.raw_input_original = input_func
if stdin:
IPython.utils.io.stdin = stdin
if stdout:
IPython.utils.io.stdout = stdout
if stderr:
IPython.utils.io.stderr = stderr
if argv is None:
argv=[]
# This is to get rid of the blockage that occurs during
# IPython.Shell.InteractiveShell.user_setup()
#IPython.frontend.terminal.interactiveshell.TerminalInteractiveShell.raw_input = lambda x: None
self.term = IPython.utils.io.IOTerm(stdin=stdin, stdout=stdout, stderr=stderr)
os.environ['TERM'] = 'dumb'
excepthook = sys.excepthook
#self.IP = IPython.Shell.make_IPython(argv,user_ns=user_ns,
# user_global_ns=user_global_ns,
# embedded=True,
# shell_class=IPython.Shell.InteractiveShell)
orig_stdout = sys.stdout
sys.stdout = IPython.utils.io.stdout
orig_stderr = sys.stderr
sys.stderr = IPython.utils.io.stderr
self.IP = IPython.frontend.terminal.interactiveshell.TerminalInteractiveShell(user_ns=user_ns,
user_global_ns=user_global_ns)
sys.stdout = orig_stdout
sys.stderr = orig_stderr
self.IP.embedded = True
self.IP.system = lambda cmd: self.shell(self.IP.var_expand(cmd),
header='IPython system call: ',
verbose=self.IP.rc.system_verbose)
sys.excepthook = excepthook
self.iter_more = 0
self.history_level = 0
self.complete_sep = re.compile('[\s\{\}\[\]\(\)]')
def execute(self):
self.history_level = 0
orig_stdout = sys.stdout
sys.stdout = IPython.utils.io.stdout
try:
line = self.IP.raw_input()
if self.IP.autoindent:
self.IP.readline_startup_hook(None)
except KeyboardInterrupt:
self.IP.write('\nKeyboardInterrupt\n')
self.IP.resetbuffer()
# keep cache in sync with the prompt counter:
self.IP.displayhook.prompt_count -= 1
if self.IP.autoindent:
self.IP.indent_current_nsp = 0
self.iter_more = 0
except:
self.IP.showtraceback()
else:
self.iter_more = self.IP.run_cell(line)
if (self.IP.SyntaxTB.last_syntax_error and
self.IP.rc.autoedit_syntax):
self.IP.edit_syntax_error()
if self.iter_more:
self.prompt = str(self.IP.displayhook.prompt2).strip()
if self.IP.autoindent:
self.IP.readline_startup_hook(self.IP.pre_readline)
else:
self.prompt = str(self.IP.displayhook.prompt1).strip()
sys.stdout = orig_stdout
def historyBack(self):
self.history_level -= 1
return self._getHistory()
def historyForward(self):
self.history_level += 1
return self._getHistory()
def _getHistory(self):
try:
rv = self.IP.user_ns['In'][self.history_level].strip('\n')
except IndexError:
self.history_level = 0
rv = ''
return rv
def updateNamespace(self, ns_dict):
self.IP.user_ns.update(ns_dict)
def complete(self, line):
split_line = self.complete_sep.split(line)
possibilities = self.IP.complete(split_line[-1])
if possibilities:
common_prefix = reduce(self._commonPrefix, possibilities)
completed = line[:-len(split_line[-1])]+common_prefix
else:
completed = line
return completed, possibilities
def _commonPrefix(self, str1, str2):
for i in range(len(str1)):
if not str2.startswith(str1[:i+1]):
return str1[:i]
return str1
def shell(self, cmd,verbose=0,debug=0,header=''):
stat = 0
if verbose or debug: print header+cmd
# flush stdout so we don't mangle python's buffering
if not debug:
input, output = os.popen4(cmd)
print output.read()
output.close()
input.close()
class ConsoleView(gtk.TextView):
def __init__(self):
gtk.TextView.__init__(self)
self.modify_font(pango.FontDescription('Mono'))
self.set_cursor_visible(True)
self.text_buffer = self.get_buffer()
self.mark = self.text_buffer.create_mark('scroll_mark',
self.text_buffer.get_end_iter(),
False)
for code in ansi_colors:
self.text_buffer.create_tag(code,
foreground=ansi_colors[code],
weight=700)
self.text_buffer.create_tag('0')
self.text_buffer.create_tag('notouch', editable=False)
self.color_pat = re.compile('\x01?\x1b\[(.*?)m\x02?')
self.line_start = \
self.text_buffer.create_mark('line_start',
self.text_buffer.get_end_iter(), True
)
self.connect('key-press-event', self._onKeypress)
self.last_cursor_pos = 0
def write(self, text, editable=False):
segments = self.color_pat.split(text)
segment = segments.pop(0)
start_mark = self.text_buffer.create_mark(None,
self.text_buffer.get_end_iter(),
True)
self.text_buffer.insert(self.text_buffer.get_end_iter(), segment)
if segments:
ansi_tags = self.color_pat.findall(text)
for tag in ansi_tags:
i = segments.index(tag)
self.text_buffer.insert_with_tags_by_name(self.text_buffer.get_end_iter(),
segments[i+1], tag)
segments.pop(i)
if not editable:
self.text_buffer.apply_tag_by_name('notouch',
self.text_buffer.get_iter_at_mark(start_mark),
self.text_buffer.get_end_iter())
self.text_buffer.delete_mark(start_mark)
self.scroll_mark_onscreen(self.mark)
def showPrompt(self, prompt):
self.write(prompt)
self.text_buffer.move_mark(self.line_start,self.text_buffer.get_end_iter())
def changeLine(self, text):
iter = self.text_buffer.get_iter_at_mark(self.line_start)
iter.forward_to_line_end()
self.text_buffer.delete(self.text_buffer.get_iter_at_mark(self.line_start), iter)
self.write(text, True)
def getCurrentLine(self):
rv = self.text_buffer.get_slice(self.text_buffer.get_iter_at_mark(self.line_start),
self.text_buffer.get_end_iter(), False)
return rv
def showReturned(self, text):
iter = self.text_buffer.get_iter_at_mark(self.line_start)
iter.forward_to_line_end()
self.text_buffer.apply_tag_by_name('notouch',
self.text_buffer.get_iter_at_mark(self.line_start),
iter)
self.write('\n'+text)
if text:
self.write('\n')
self.showPrompt(self.prompt)
self.text_buffer.move_mark(self.line_start,self.text_buffer.get_end_iter())
self.text_buffer.place_cursor(self.text_buffer.get_end_iter())
def _onKeypress(self, obj, event):
if not event.string:
return
insert_mark = self.text_buffer.get_insert()
insert_iter = self.text_buffer.get_iter_at_mark(insert_mark)
selection_mark = self.text_buffer.get_selection_bound()
selection_iter = self.text_buffer.get_iter_at_mark(selection_mark)
start_iter = self.text_buffer.get_iter_at_mark(self.line_start)
if start_iter.compare(insert_iter) <= 0 and \
start_iter.compare(selection_iter) <= 0:
return
elif start_iter.compare(insert_iter) > 0 and \
start_iter.compare(selection_iter) > 0:
self.text_buffer.place_cursor(start_iter)
elif insert_iter.compare(selection_iter) < 0:
self.text_buffer.move_mark(insert_mark, start_iter)
elif insert_iter.compare(selection_iter) > 0:
self.text_buffer.move_mark(selection_mark, start_iter)
class IPythonView(ConsoleView, IterableIPShell):
def __init__(self):
ConsoleView.__init__(self)
self.stdout = StringIO()
self.interrupt = False
IterableIPShell.__init__(self, stdout=self.stdout,stderr=self.stdout,
input_func=self.raw_input)
self.connect('key_press_event', self.keyPress)
self.execute()
self.stdout.truncate(0)
self.showPrompt(self.prompt)
def raw_input(self, prompt=''):
if self.interrupt:
self.interrupt = False
raise KeyboardInterrupt
return self.getCurrentLine()
def keyPress(self, widget, event):
        if event.state & gtk.gdk.CONTROL_MASK and event.keyval == gtk.keysyms.c:
self.interrupt = True
self._processLine()
return True
elif event.keyval == gtk.keysyms.Return:
self._processLine()
return True
elif event.keyval == gtk.keysyms.Up:
self.changeLine(self.historyBack())
return True
elif event.keyval == gtk.keysyms.Down:
self.changeLine(self.historyForward())
return True
elif event.keyval == gtk.keysyms.Tab:
if not self.getCurrentLine().strip():
return False
completed, possibilities = self.complete(self.getCurrentLine())
if len(possibilities) > 1:
slice = self.getCurrentLine()
self.write('\n')
for symbol in possibilities:
self.write(symbol+'\n')
self.showPrompt(self.prompt)
self.changeLine(completed or slice)
return True
def _processLine(self):
self.history_pos = 0
self.execute()
rv = self.stdout.getvalue()
if rv: rv = rv.strip('\n')
self.showReturned(rv)
self.stdout.truncate(0) | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/third_party/ipython_view_11.py | ipython_view_11.py |
import re
from aobject.utils import debug_print
import numpy
import gobject
import gtk
import os
import random
from Source import Source
from aobject.aobject import AObject, aname_root_catalog, AOBJECT_CAN_NONE
try :
import vtk as pyvtk
except :
have_vtk = False
else :
have_vtk = True
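# SourceVTK wraps a vtkXMLUnstructuredGridReader: the points of the .vtu file
# are exposed as a scatter source, and the file's mtime is polled every 5s to
# flag when a reload is needed.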
class SourceVTK (Source) :
vals = None
dim = 2
filename = ""
sort = True
last_write = None
needs_reloaded = False
    def __init__(self, filename, env = None) :
        if not have_vtk :
            raise RuntimeError("Don't have VTK Python package")
        Source.__init__(self, "VTK", env, show = False, reloadable = True)
        self.filename = filename
        self.reader = pyvtk.vtkXMLUnstructuredGridReader()
        self.reader.SetFileName(self.filename)
        self.set_aname_nice(os.path.basename(filename) + ' [' + filename + ']')
        self.source_reload()
        gobject.timeout_add(5000, self.source_check)
def source_type(self) :
return 'scatter'
def source_check(self) :
try : mtime = os.stat(self.filename).st_mtime
except (OSError) : return True
if not self.needs_reloaded and self.last_write < mtime :
self.needs_reloaded = True
self.emit("aes_needs_reloaded_status_change", True)
return True
def is_needs_reloaded(self) : return self.needs_reloaded
def source_reload(self) :
vals = []
self.needs_reloaded = False
self.last_write = os.stat(self.filename).st_mtime
self.reader.Update()
grid = self.reader.GetOutput()
points = grid.GetPoints()
num_points = int(points.GetNumberOfPoints())
for i in range(0, num_points) :
point_array = points.GetPoint(i)
vals.append(point_array)
multicol = True
self.vals = vals
Source.source_reload(self)
def source_get_max_dim(self) : return self.dim
def source_get_values(self, time = None, multi_array = False, x_range = None) :
if multi_array :
return [{'values':self.vals,'name':self.get_aname_nice()}]
else : return self.vals
def VTK_factory(filename, env = None) :
vtk = SourceVTK(filename, env)
return vtk | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/sources/VTK.py | VTK.py |
import sympy
from sympy.core.function import Lambda as aes_Lambda
import pickle
import numpy
import lxml.etree as ET
from aobject.aobject import AObject, aname_root_catalog, AOBJECT_CAN_NONE, string_to_float_tup
from aobject.utils import debug_print
from Source import Source
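# FunctionSource samples a Python callable over the requested x (and, for 3D,
# y) range at a given resolution; SympySource below specialises it for sympy
# expressions.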
class FunctionSource(Source) :
function = None
needs_resolution = True
xlim = None
def aes_get_parameters(self) :
return {
'resolution' : self.resolution
}
def get_auto_aesthete_properties(self) :
d = Source.get_auto_aesthete_properties(self)
d.update({
"xlim" : (string_to_float_tup, (AOBJECT_CAN_NONE,))
})
return d
    def change_xlim(self, new_xlim) :
        self.xlim = new_xlim
        self.source_reload()
def source_type(self) :
return 'line'
def __init__(self, function, max_dim, limits=None, env = None, stem='FunctionSource', show = False,
reloadable=True, resolution = 10) :
self.resolution = resolution
self.function = function
self.max_dim = max_dim
#FIXME : moving ranges for 3D and buffering for both
needs_x_range = max_dim==2
Source.__init__(self, stem, env, show, reloadable,
needs_x_range=needs_x_range)
if limits is not None :
self.set_xlim((str(limits[1]), str(limits[2])))
def source_get_values(self, time = None, multi_array = False,
x_range=None, y_range=(0, 1.), resolution=10.) :
f = self.function
if self.xlim is not None and x_range is None:
x_range = map(float, self.xlim)
if x_range is None :
x_range = (0., 1.)
self.current_x_range = x_range
if resolution < 1 :
resolution = 10.
xa = numpy.arange(x_range[0], x_range[1], (x_range[1]-x_range[0])/resolution)
if self.source_get_max_dim() == 3 :
ya = numpy.arange(y_range[0], y_range[1],
(y_range[1]-y_range[0])/resolution)
xc = len(xa)
yc = len(ya)
xa, ya = numpy.meshgrid(xa, ya)
p = xa.copy()
for ind, val in numpy.ndenumerate(p) :
try :
p[ind] = f(xa[ind], ya[ind])
except :
p[ind] = numpy.nan
if multi_array :
return [{'values':p,'x':xa,'y':ya,'name':self.get_aname_nice()}]
else :
p = numpy.array([ f(x) for x in xa ])
if multi_array :
return [{'values':p,'x':xa,'name':self.get_aname_nice()}]
return p
def source_get_max_dim(self) :
return self.max_dim
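# SympySource keeps the original sympy expression alongside the callable used
# for evaluation, so that it can be pickled (sympy_function_pickled) and
# recreated via aes_load_a below.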
class SympySource(FunctionSource) :
def aes_get_parameters(self) :
d = FunctionSource.aes_get_parameters(self)
d.update({'sympy_function_pickled' : self.get_sympy_function_pickled()})
return d
def get_auto_aesthete_properties(self) :
d = FunctionSource.get_auto_aesthete_properties(self)
d.update({ 'sympy_function_pickled' : (str,) })
return d
def get_sympy_function_pickled(self) :
return pickle.dumps(self.sympy_function)
def change_sympy_function_pickled(self, var) :
f = pickle.loads(var)
self.set_function_from_sympy(f)
def set_function_from_sympy(self, f, source_reload=True) :
self.sympy_function = f
if hasattr(f, "free_symbols") :
syms = f.free_symbols
else :
            syms = f.atoms(sympy.Symbol)
        #args = list(syms)+[f]
        symbols = len(syms)+1
        f = aes_Lambda(tuple(syms), f)
self.function = lambda *args : f(*args).evalf()
self.max_dim = symbols
if source_reload :
self.source_reload()
def __init__(self, f, limits=None, env = None, show = False,
reloadable = False, resolution = 10) :
self.resolution = resolution
self.set_function_from_sympy(f, source_reload=False)
if limits is not None :
limits = (limits[0], limits[1].evalf(), limits[2].evalf())
FunctionSource.__init__(self, self.function, self.max_dim,
stem='SympySource',
limits=limits,
env=env,
show=show,
reloadable=reloadable,
resolution=resolution)
@classmethod
def aes_load_a(cls, env, **parameters) :
f = pickle.loads(parameters['sympy_function_pickled'])
return cls(f, env=env,
resolution=int(parameters['resolution']))
aname_root_catalog['SympySource'] = SympySource.aes_load_a | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/sources/Function.py | Function.py |
from sympy.core.function import Lambda as aes_Lambda
from aobject import aobject
from VTK import SourceVTK, VTK_factory
from CSV import CSV, CSV_factory
import lxml.etree as ET
import gtk
import os
import gobject
from aobject import details
from aobject.utils import debug_print
from .. import glypher
class SourceImporter(gtk.FileChooserDialog) :
last_dir = os.path.expanduser('~')
def __init__(self) :
gtk.FileChooserDialog.__init__(self,
title="Import Source", action=gtk.FILE_CHOOSER_ACTION_OPEN,
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
@classmethod
def run_chooser(cls, env = None) :
chooser = cls()
chooser.set_current_folder(cls.last_dir)
chooser.set_default_response(gtk.RESPONSE_OK)
chooser.set_select_multiple(True)
#extra_hbox = gtk.HBox()
#extra_hbox.pack_start(gtk.Label("Name as"), False)
#extra_entr = gtk.Entry()
#extra_entr.set_activates_default(True)
#extra_hbox.pack_start(extra_entr)
#extra_hbox.show_all()
#chooser.set_extra_widget(extra_hbox)
resp = chooser.run()
#new_name = extra_entr.get_text()
#if len(new_name) == 0:
# new_name = None
cls.last_dir = chooser.get_current_folder()
if resp == gtk.RESPONSE_OK :
uris = chooser.get_uris()
chooser.destroy()
else :
chooser.destroy()
return
for uri in uris :
load_from_file(uri, None, env)
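# parse_params reads simple 'var = val' pairs: with header=True (the default)
# only the block between the first two lines starting with '=' is parsed; with
# header=False parsing runs from the top of the file to the first '=' line.
# '#' introduces a comment.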
def parse_params(pfn, header = True):
pfile = open(pfn, 'r')
params = {}
for line in pfile:
if cmp(line[0],'=') == 0:
if header :
header = False
continue
else :
break
if header: continue
commpos = line.find('#')
if commpos > 0 : line = line[0:commpos-1]
line = line.strip()
if commpos == 0 or len(line) == 0: continue
var,val = line.split('=')
var = var.strip(); val = val.strip()
params[var] = val
return params
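# SourceDictionary watches the global object dictionary and re-emits an
# "aesthete-source-change" signal for additions and removals that concern
# Source objects, so source lists elsewhere can stay in sync.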
class SourceDictionary(gobject.GObject) :
def __init__(self) :
gobject.GObject.__init__(self)
dic = aobject.get_object_dictionary()
dic.connect("add", self.check_source)
dic.connect("remove", self.check_source)
def check_source(self, ob, n, r) :
ob = aobject.get_object_from_dictionary(n)
if ob == None : ob = aobject.get_removing_from_dictionary(n)
if ob == None or ob.am("Source") :
self.emit("aesthete-source-change", n, r)
source_dictionary = SourceDictionary()
def get_source_dictionary() : return source_dictionary
def do_source_rename(dlog, resp) :
if resp == gtk.RESPONSE_ACCEPT :
dlog.object.set_aname_nice(dlog.get_text())
def make_source_rename_dialog(parent, object) :
rename_dlog = gtk.Dialog("Rename source", parent, 0,\
(gtk.STOCK_CANCEL , gtk.RESPONSE_REJECT,\
gtk.STOCK_OK , gtk.RESPONSE_ACCEPT))
rename_hbox = gtk.HBox()
rename_dlog.vbox.pack_start(rename_hbox)
rename_hbox.pack_start(gtk.Label("Name"), False, False, 15)
rename_entr = gtk.Entry()
rename_entr.set_text(object.get_aname_nice())
rename_hbox.pack_start(rename_entr)
rename_dlog.get_text = rename_entr.get_text
rename_dlog.object = object
rename_dlog.connect("response", do_source_rename)
rename_dlog.show_all()
rename_dlog.run()
rename_dlog.destroy()
def do_source_treeview_popup(trvw, event) :
if event.button != 3 : return
item = trvw.get_path_at_pos(int(event.x), int(event.y))
if item == None : return
time = event.time
context_menu = gtk.Menu()
obj = aobject.get_object_from_dictionary(trvw.get_model()[item[0]][0])
context_menu_aname_nice = gtk.MenuItem(obj.get_aname_nice())
context_menu_aname_nice.set_sensitive(False)
context_menu.add(context_menu_aname_nice)
context_menu_rename = gtk.MenuItem("Rename")
context_menu_rename.connect("button-press-event", lambda o, e : make_source_rename_dialog(trvw.get_toplevel(), obj))
context_menu.add(context_menu_rename)
context_menu_rename = gtk.MenuItem("Remove")
context_menu_rename.connect("button-press-event",
lambda o, e : obj.aes_remove())
context_menu.add(context_menu_rename)
if (obj.reloadable) :
context_menu_reload = gtk.MenuItem("Reload")
context_menu_reload.connect("button-press-event", lambda o, e : obj.source_reload())
context_menu.add(context_menu_reload)
context_menu_settings = gtk.MenuItem("Settings...")
context_menu_settings.connect("button-press-event", lambda o, e :\
obj.env.action_panel.to_action_panel(obj.get_action_panel()))
context_menu.add(context_menu_settings)
    context_menu.show_all()
    context_menu.popup( None, None, None, event.button, time )
connected = []
_entry_server = None
def init_entry_server() :
global _entry_server
if _entry_server is None :
_entry_server = glypher.Widget.GlyphEntry(position=(0,0))
root = glypher.Word.make_word('hi',
_entry_server.main_phrase).get_xml()
xml = ET.ElementTree(root)
_entry_server.set_xml(xml)
_entry_server.main_phrase.set_is_caching(True)
_entry_server.main_phrase.set_font_size(5)
_entry_server.main_phrase.background_colour = (0.3,0.5,0.4)
def cdf(c, cr, tm, it) :
obj = aobject.get_object_from_dictionary(tm.get_value(it,0))
if obj is None : return
#cr.set_property('xml', tm.get_value(it, 2))
#cr.set_property('text', tm.get_value(it, 1))
cr.set_property('obj', obj)
#cr.set_property('foreground', 'red' if obj.is_needs_reloaded() else 'black')
#if obj.get_aname_xml() is None :
# root = glypher.Word.make_word(obj.get_aname_nice(),
# _entry_server.main_phrase).get_xml()
#else :
# root = obj.get_aname_xml()
#xml = ET.ElementTree(root)
##_entry_server.set_xml(xml)
#_entry_server.main_phrase.draw()
#cis = _entry_server.main_phrase.cairo_cache_image_surface
#pixbuf = \
# gtk.gdk.pixbuf_new_from_data(cis.get_data(),
# gtk.gdk.COLORSPACE_RGB,
# True,
# 8,
# cis.get_width(),
# cis.get_height(),
# cis.get_stride())
#cr.set_property('pixbuf', pixbuf)
def on_needs_reloaded_status_change(ob, val, tm, i, tv, cr) :
tv.get_column(0).set_cell_data_func(cr, cdf)
tv.queue_draw()
def connect_sources_reload_signal(tm, p, i, tv, cr) :
obj = aobject.get_object_from_dictionary(tm.get_value(i,0))
if obj.get_aname() in connected : return
obj.connect("aes_needs_reloaded_status_change", on_needs_reloaded_status_change, tm, i, tv, cr)
connected.append(obj.get_aname())
def make_source_treeview() :
init_entry_server()
sources_lsst = aobject.get_object_dictionary().get_liststore_by_am('Source')
sources_trvw = gtk.TreeView(sources_lsst)
sources_trvw.unset_flags(gtk.CAN_FOCUS)
sources_trvc = gtk.TreeViewColumn('Sources'); sources_trvw.append_column(sources_trvc)
#sources_cllr = gtk.CellRendererPixbuf(); sources_trvc.pack_start(sources_cllr, True)
sources_cllr = glypher.Widget.GlyphCellRenderer(); sources_trvc.pack_start(sources_cllr, True)
#sources_trvc.set_attributes(sources_cllr, text=1)
sources_trvc.set_cell_data_func(sources_cllr, cdf)
sources_trvw.connect("button-press-event", do_source_treeview_popup)
sources_trvw.connect("row-activated", lambda t, p, c : \
aobject.get_object_dictionary().try_active_source_action())
sources_trvw.connect("cursor-changed", lambda trvw :
aobject.get_object_dictionary().selected_source_change(\
sources_lsst.get_value(\
sources_lsst.get_iter(trvw.get_cursor()[0]), 0)))
sources_lsst.connect("row-changed", connect_sources_reload_signal, sources_trvw, sources_cllr)
sources_lsst.connect("row-changed", \
lambda tm, p, i : sources_trvw.set_cursor(p, sources_trvc))
return sources_trvw
def reload_sources(ness = True) :
d = aobject.get_object_dictionary().dictionary
for key in d :
obj = d[key]
if obj.am('Source') and obj.reloadable and ( not ness or obj.is_needs_reloaded() ) :
debug_print("Reloading", obj.get_aname_nice())
obj.source_reload()
def load_from_file(uri, new_name, env) :
#if filename.startswith('file://') :
# filename = filename[7:]
suff = uri.split('.')
suff = suff[-1].lower() if len(suff) > 1 else None
if suff == 'csv' :
source = CSV_factory(uri, env)
mime_type = 'text/csv'
elif suff == 'vtu' :
source = VTK_factory(uri, env)
        mime_type = 'application/xml'
else :
raise RuntimeError('Unrecognized file type : ' + uri)
if source is None :
return
#if new_name is not None :
# source.set_aname_nice(new_name)
recman = gtk.recent_manager_get_default()
#if not filename.startswith('file://') :
# uri = 'file://'+filename
#else :
# uri = filename
recman.add_full(uri, {'mime_type' : mime_type,
'app_name' : details.get_name(),
'app_exec' : details.get_command(),
'display_name' : source.get_aname_nice(),
'description' : 'Aesthete imported source',
'is_private' : False,
'groups' : ['AestheteSrc']})
gobject.signal_new("aesthete-source-change", SourceDictionary, gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_STRING, gobject.TYPE_STRING)) | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/sources/sources.py | sources.py |
import re
from aobject.utils import debug_print, AesFile
import gio
import numpy
import gobject
import gtk
import os
import random
from Source import Source
from aobject.aobject import AObject, aname_root_catalog, AOBJECT_CAN_NONE,\
get_status_bar, string_to_int_tup
from .. import tablemaker
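# Sum of absolute differences between row v (at the column indices in c) and
# the target tuple t; CSV.source_get_values uses this to pick the rows closest
# to a requested time.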
def abs_sum(v, c, t) :
return sum(map(lambda j : abs(v[c[j]]-t[j]),
range(0, len(t))))
class CSV (Source) :
vals = None
dim = 2
skip_rows = 1
sort = True
last_write = None
needs_reloaded = False
def get_auto_aesthete_properties(self) :
d = Source.get_auto_aesthete_properties(self)
d.update({
'uri' : (str, (AOBJECT_CAN_NONE,)),
'skip_rows' : (int,),
'presort' : (bool,),
})
return d
def change_uri(self, val) :
if self.uri is None :
self.set_aname_nice(val.rsplit('/',1)[1])# + ' [' + val + ']')
self.uri = val
self.source_reload()
def __init__(self, uri=None, env = None, presort=False, skip_rows=1,
range_cols=None, domain_cols=None, time_cols=None) :
Source.__init__(self, "CSV", env, show = False, reloadable = True)
self.set_skip_rows(skip_rows)
self.set_presort(presort)
self.dim = 0
if domain_cols is not None and len(domain_cols) > 0 :
self.set_domain_cols(domain_cols)
self.dim += len(domain_cols)
if range_cols is not None and len(range_cols) > 0 :
self.set_range_cols(range_cols)
self.dim += len(range_cols)
if time_cols is not None and len(time_cols) > 0 :
self.set_time_cols(time_cols)
if self.dim == 0:
self.dim = None
if uri is not None :
self.set_uri(uri)
gobject.timeout_add(5000, self.source_check)
def source_type(self) :
return 'line'
    def source_check(self) :
        #FIXME: swap to GIO monitor file (for modifications)
        if self.uri is None or not self.uri.startswith('file://') :
            return True
        try :
            mtime = os.stat(self.uri[7:]).st_mtime
        except OSError :
            return True
if not self.needs_reloaded and self.last_write < mtime :
self.needs_reloaded = True
self.emit("aes_needs_reloaded_status_change", True)
return True
def is_needs_reloaded(self) : return self.needs_reloaded
def source_reload(self) :
if self.uri is None :
return
vals = []
self.needs_reloaded = False
#FIXME: swap to GIO monitor file (for modifications)
if self.uri[:7] == 'file://' :
self.last_write = os.stat(self.uri[7:]).st_mtime
gf = gio.File(uri=self.uri)
f = AesFile(gf)
#columns = f.readline().split(',')
vals = numpy.loadtxt(f, delimiter=',', unpack = True,
skiprows=self.get_skip_rows())
multicol = True
try :
test = vals[0][0]
except : multicol = False
#for i in range(0, len(vals[1])) : vals[1][i] *= 0.001
if multicol :
if self.dim is None :
self.dim = len(vals)
else :
self.dim = min(self.dim, len(vals))
debug_print(self.dim)
vals = zip(*vals)
vals.sort(lambda x, y : cmp(x[0], y[0]))
else :
vals.sort()
vals = [vals]
self.dim = 1
# Remove!
self.vals = vals
Source.source_reload(self)
def source_get_max_dim(self) :
return self.dim
def source_get_values(self, multi_array = False, x_range=None,
y_range=None,
time=None,
resolution=None) :
c = None
time_cols = self.get_time_cols()
if time_cols is not None :
if time is None :
time = [0]*len(time_cols)
if len(time)==len(time_cols):
c = [0]
prox = abs_sum(self.vals[0], time_cols, time)
for i in range(1, len(self.vals)) :
new_prox = abs_sum(self.vals[i], time_cols, time)
if abs(new_prox-prox) <= 1e-10 :
c.append(i)
elif new_prox < prox :
prox = new_prox
c = [i]
if multi_array :
vals = list(self.vals)
if c is not None :
vals = [vals[i] for i in c]
if self.get_presort() :
sort_col = self.get_domain_cols()[0]
vals = sorted(vals, lambda a, b : cmp(a[sort_col], b[sort_col]))
vals = zip(*vals)
ret = {'name':self.get_aname_nice()}
domain_cols = self.get_domain_cols()
if domain_cols is not None :
names = ('x','y')
for r in range(0, len(domain_cols)) :
ret[names[r]] = vals[domain_cols[r]]
else :
                ret['x'] = vals[0]
range_cols = self.get_range_cols()
if range_cols is not None :
if len(range_cols) == 1 :
ret['values'] = vals[range_cols[0]]
else :
ret['values'] = [vals[i] for i in range_cols]
return [ret]
else : return self.vals
def get_useful_vars(self) :
return {
'vals' : 'Values',
}
IGNORE = 0
DOMAIN = 1
RANGE = 2
TIME = 3
col_types = (IGNORE,DOMAIN,RANGE,TIME)
aname_root_catalog['CSV'] = CSV
default_csv_signatures = {
1 : (RANGE,),
2 : (DOMAIN,RANGE),
3 : (DOMAIN,DOMAIN,RANGE),
4 : (TIME,DOMAIN,DOMAIN,RANGE),
}
csv_sig_props = {
IGNORE : { 'max' : 0, 'name' : '', 'colour' : 'white' },
DOMAIN : { 'max' : 2, 'name' : 'Domain', 'colour' : 'yellow' },
RANGE : { 'max' : 2, 'name' : 'Range', 'colour' : 'red' },
TIME : { 'max' : 0, 'name' : 'Time', 'colour' : 'green' },
}
def _cycle_header(trvc, user_data) :
i, csv_sig, csv_cols = user_data
col_type = csv_sig[i]
csv_cols[col_type].remove(i)
col_type = col_types[(col_type+1)%len(col_types)]
csv_sig[i] = col_type
csv_cols[col_type].append(i)
if csv_sig_props[col_type]['max'] > 0 and \
len(csv_cols[col_type]) > csv_sig_props[col_type]['max'] :
j = csv_cols[col_type][0]
_cycle_header(None, (j, csv_sig, csv_cols))
def _redo_trvc(trvc, user_data) :
tree_view, csv_sig, csv_cols, col_lsst, skip_rows_entr = user_data
try :
skip_rows = int(skip_rows_entr.get_text())
except :
skip_rows = 0
dim = len(csv_sig)
for i in range(0, dim) :
col_trvc = tree_view.get_column(i)
col_type = csv_sig[i]
n = csv_cols[col_type].index(i)
if col_type is IGNORE :
col_title = ''
else :
col_title = csv_sig_props[col_type]['name'] + (' %d'%(n+1))
col_trvc.set_title(col_title)
k = 0
for row in col_lsst :
if k < skip_rows :
colour = 'gray'
else :
colour = csv_sig_props[col_type]['colour']
col_lsst.set_value(row.iter, i+dim, colour)
k += 1
col_trvc.set_fixed_width(300)
def _clear_sig(csv_sig, csv_cols) :
for i in range(0, len(csv_sig)) :
csv_sig[i] = IGNORE
for ty in col_types :
csv_cols[ty] = []
csv_cols[IGNORE] = range(0, len(csv_sig))
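# CSV_factory pops up the import dialog: clicking a column header cycles its
# role through Ignore -> Domain -> Range -> Time (capped by the 'max' entries
# in csv_sig_props), and the chosen assignment is handed to the CSV source.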
def CSV_factory(uri, env = None) :
gf = gio.File(uri=uri)
f = AesFile(gf)
vals = numpy.genfromtxt(f, delimiter=',', unpack=True, skiprows=0)
vals_by_row = zip(*vals)
for i in range(0, 5) :
try :
map(float, vals_by_row[i])
except:
debug_print(vals_by_row[i])
break
skip_rows = 0
if i < 4 :
skip_rows = i
chooser = gtk.Dialog(\
title="Import CSV", parent=env.toplevel,
flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT,
buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
chooser.set_default_response(gtk.RESPONSE_OK)
vbox = gtk.VBox()
pref_tab = tablemaker.PreferencesTableMaker()
name_as_entr = gtk.Entry()
pref_tab.append_row('Name as', name_as_entr)
skip_rows_entr = gtk.Entry()
skip_rows_entr.set_text(str(skip_rows))
pref_tab.append_row('Skip rows', skip_rows_entr)
presort_chkb = gtk.CheckButton()
pref_tab.append_row('Presort', presort_chkb)
vbox.pack_start(pref_tab.make_table())
vbox.pack_start(gtk.VSeparator())
col_select_labl = gtk.Label()
col_select_labl.set_markup('<b>Set column types</b>')
vbox.pack_start(col_select_labl)
dim = len(vals)
cols = [str]*(2*dim)
col_lsst = gtk.ListStore(*cols)
col_trvw = gtk.TreeView(col_lsst)
if dim in default_csv_signatures :
csv_sig = list(default_csv_signatures[dim])
else :
csv_sig = [IGNORE]*dim
csv_cols = {}
for col_type in col_types :
csv_cols[col_type] = []
for i in range(0, dim) :
col_type = csv_sig[i]
csv_cols[col_type].append(i)
col_trvc = gtk.TreeViewColumn('')
col_trvw.append_column(col_trvc)
col_trvc.set_clickable(True)
col_crtx = gtk.CellRendererText()
col_trvc.pack_start(col_crtx, True)
col_trvc.add_attribute(col_crtx, 'text', i)
col_trvc.add_attribute(col_crtx, 'background', i+dim)
col_trvc.connect("clicked", _cycle_header, (i, csv_sig, csv_cols))
col_trvc.connect_after("clicked", _redo_trvc, (col_trvw, csv_sig,
csv_cols, col_lsst,
skip_rows_entr))
for i in range(0, 4) :
if i < len(vals[0]) :
col_lsst.append(\
[str(vals[j][i]) for j in range(0, dim)]+\
[csv_sig_props[csv_sig[j]]['colour'] for j in range(0, dim)]\
)
else :
            col_lsst.append(['']*dim + ['white']*dim)
_redo_trvc(None, (col_trvw, csv_sig, csv_cols, col_lsst, skip_rows_entr))
vbox.pack_start(col_trvw)
clear_butt = gtk.Button("Clear")
clear_butt.connect("clicked", lambda w : (\
_clear_sig(csv_sig, csv_cols),
_redo_trvc(None, (col_trvw, csv_sig, csv_cols, col_lsst, skip_rows_entr))))
skip_rows_entr.connect('changed', lambda e :
_redo_trvc(None, (col_trvw, csv_sig, csv_cols, col_lsst, e)))
vbox.pack_start(clear_butt)
chooser.get_content_area().add(vbox)
chooser.get_content_area().show_all()
resp = chooser.run()
chooser.grab_focus()
if resp == gtk.RESPONSE_OK :
try :
skip_rows = int(skip_rows_entr.get_text())
except :
skip_rows = 0
#status_bar = get_status_bar()
#status_bar.push(0, "[CSV Import] Could not parse preferences")
#chooser.destroy()
#return
presort = presort_chkb.get_active()
name_as = name_as_entr.get_text()
if name_as == '' :
name_as = uri.rsplit('/',1)[1]
if len(csv_cols[IGNORE])>0 :
name_as += ' [%s]' % ','.join(map(str,csv_cols[RANGE]))
chooser.destroy()
else :
chooser.destroy()
return
csv = CSV(uri, env, skip_rows=skip_rows,
presort=presort,
domain_cols=tuple(csv_cols[DOMAIN]),
range_cols=tuple(csv_cols[RANGE]),
time_cols=tuple(csv_cols[TIME]),
)
csv.set_aname_nice(name_as)
return csv | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aesthete/sources/CSV.py | CSV.py |
import re
import gio
import shutil
from utils import debug_print, debug_print_stack, AesFile
import traceback
import os
import lxml.etree as ET
import details
import paths
import pango
import matplotlib
import alogger
import gobject
import gtk
import math
AOBJECT_CAN_NONE = 1
AOBJECT_NO_CHANGE = 2
ICON_SIZE_PREFERENCE =\
gtk.icon_size_register('aes-icon-size-preference', 8, 8)
def to_string(val) :
if val is None :
return ''
return str(val)
#FIXME: THIS DOESN'T HANDLE TUPS OF STRINGS WITH COMMAS IN THEM
def cast_to_tup(string) :
if isinstance(string, tuple) :
return string
return tuple(string[1:-1].split(','))
def cast_can_none(cast, str_or_val) :
if str_or_val == '' or str_or_val == None :
return None
else :
return cast(str_or_val)
def make_cast_can_none(cast) :
return lambda s : cast_can_none(cast, s)
def cast_to_bool(string) :
return str(string) == 'True'
def make_change(self, name) :
return lambda val : self.__dict__.__setitem__(name, val)
def make_set(self, name) :
return lambda val : self.change_property(name, val)
def make_get(self, name, can_none) :
if can_none :
return lambda : None \
if self.__getattribute__(name) == '' else \
self.__getattribute__(name)
else :
return lambda : self.__getattribute__(name)
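# Draw the small padlock glyph that AesPrefEntry overlays on its entry; the
# colour encodes the preference state and the alpha is reduced while the entry
# holds an edit that has not yet been Set.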
def _trace_lock(cr, a2, r, g, b, a) :
cr.set_source_rgba(r, g, b, a*a2)
cr.move_to(0, -1)
cr.line_to(0, 3)
cr.line_to(-4, 3)
cr.line_to(-4, -1)
cr.arc(-2, -1, 2, math.pi, 0)
cr.fill()
cr.set_source_rgba(1.,1.,1.,1.)
cr.arc(-2, -1, 1, math.pi, 0)
cr.close_path()
cr.fill()
class AesPrefEntry (gtk.EventBox) :
__gsignals__ = { \
"realize" : "override", \
"preference-toggled" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
()),
"set-event" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
()),
"expose-event" : "override", \
"key-release-event" : "override",
}
_colours = { 'off' : (.6,.6,.6,.7),
'on' : (.7,.7,.2,.7),
'wrong' : (.7,.2,.2,.7) }
def __init__(self, name, get, get_pref, entry_widget=None) :
gtk.EventBox.__init__(self)
#self.connect_after("realize", self._realize)
        if entry_widget is None :
            self.entry = gtk.Entry()
            self.entry.connect("changed", lambda o : self.check_changed())
        else :
            self.entry = entry_widget
e = self.entry
self.add(self.entry)
cols = ("gray", "gold", "red", "lightgray")
self.set_tooltip_markup(\
"<b>Preferenceable text entry</b>\n"
"This allows you to edit a value before Setting it (use Return) "
"and to save a Set value as a Preference for next time (Ctrl+Return). "
"Reset to a stored Preference using Shift+Return.\n"
"<span foreground='%s'>Grey lock</span>: "
"No stored preference\n"
"<span foreground='%s'>Gold lock</span>: "
"Current <i>set</i> value equals stored preference\n"
"<span foreground='%s'>Red lock</span>: "
"Current <i>set</i> value is not stored preference\n"
"<span foreground='%s'>Any lock faded</span>: "
"Current text entry value has not been Set"
%cols)
#screen = e.get_screen()
#rgba = screen.get_rgba_colormap()
#e.set_colormap(rgba)
#e.connect_after("realize", self._realize_entry)
self.entry.show()
self.check_changed = lambda : \
self.set_changed(self.get_text()!=to_string(get()))
self.get_preference = get_pref
changed = False
def get_changed(self) :
return self.changed
def set_changed(self, changed) :
self.changed = changed
preference_matches = False
    def get_preference_matches(self) :
        return self.preference_matches
def set_preference_matches(self, can) :
self.preference_matches = can
def do_realize(self) :
ret = gtk.EventBox.do_realize(self)
self.entry.realize()
self.entry.window.set_composited(True)
return ret
def get_text(self) :
return self.entry.get_text()
def set_text(self, t) :
self.entry.set_text(t)
self.check_changed()
preference_active = False
def get_preference_status(self) :
if not self.preference_active :
return "off"
if self.preference_matches :
return "on"
else :
return "wrong"
def set_preference_status(self, act) :
self.preference_active = act != 'off'
self.preference_matches = act == 'on'
self.queue_draw()
def do_key_release_event(self, event):
keyname = gtk.gdk.keyval_name(event.keyval)
m_control = bool(event.state & gtk.gdk.CONTROL_MASK)
m_shift = bool(event.state & gtk.gdk.SHIFT_MASK)
m_alt = bool(event.state & gtk.gdk.MOD1_MASK)
m_super = bool(event.state & gtk.gdk.SUPER_MASK)
if keyname == 'Return' :
if m_shift and self.get_preference() is not None :
self.set_text(self.get_preference())
self.emit("set-event")
if m_control :
self.preference_active = not self.preference_active
self.emit("preference-toggled")
override_show = False
def get_override_show(self) :
return self.override_show
def set_override_show(self, show) :
self.override_show = show
self.queue_draw()
def should_show_lock(self) :
return self.entry.has_focus()
def do_expose_event(self, event) :
        x, y, width, height = event.area
ret = gtk.EventBox.do_expose_event(self, event)
cr = self.window.cairo_create()
e = self.entry
if e.window is not None :
cr.set_source_pixmap(e.window,
e.allocation.x,
e.allocation.y)
cr.paint()
if (self.get_override_show() or self.should_show_lock()) :
al = e.allocation
cr.translate(al.x+al.width,
al.y+.5*al.height)
cr.scale(3,3)
cr.translate(-2, 0)
state = self.get_preference_status()
alpha = 0.75 if self.changed else 1.0
_trace_lock(cr, alpha, *self._colours[state])
cr.fill()
return ret
class AesPrefTupleEntry (AesPrefEntry) :
def __init__(self, name, get, get_pref, tuple_length=2) :
hbox = gtk.HBox()
self.grey = gtk.gdk.Color('#DDDDDD')
entries = []
self.entries = entries
for i in range(0, tuple_length) :
entries.append(gtk.Entry())
entries[i].set_has_frame(False)
if i%2 == 0 :
entries[i].modify_base(gtk.STATE_NORMAL, self.grey)
hbox.pack_start(entries[i])
entries[i].connect("changed", lambda o : self.check_changed())
entries[i].connect('focus-in-event', lambda o, e : self.queue_draw())
entries[i].connect('focus-out-event', lambda o, e : self.queue_draw())
hbox.show_all()
inner_event_box = gtk.EventBox()
inner_event_box.add(hbox)
AesPrefEntry.__init__(self, name, get, get_pref,
entry_widget=inner_event_box)
def should_show_lock(self) :
hf = [e.has_focus() for e in self.entries]
return True in hf or self.get_child().get_child().has_focus()
def set_text(self, text) :
l = text[1:-1].split(',')
if len(l) != len(self.entries) :
raise RuntimeError(\
'Tuple wrong length for setting in TupleEntry: '
"%d but expected %d" % (len(l), len(self.entries)))
for i in range(0, len(l)) :
self.entries[i].set_text(l[i])
self.check_changed()
def get_text(self) :
return "(%s)" % ','.join([e.get_text() for e in self.entries])
def check_float(var) :
if var == '' or var is None :
return False
try : float(var)
except ValueError :
return False
if var[0] == '.' or var[-1] == '.' :
return False
return True
class Env :
logger = None
action_panel = None
toplevel = None
def __init__ (self, logger = None, action_panel = None, toplevel = None) :
self.logger = logger
self.action_panel = action_panel
self.toplevel = toplevel
def aespref_adj(obj, butt, propname) :
get = obj.aesthete_properties[(propname, obj.get_aname())][1]
if (obj.get_preference(propname) == to_string(get())) :
if not butt.preference_active : obj.set_preference(propname, None)
else :
if butt.preference_active : obj.set_preference(propname, to_string(get()))
def butt_adj(obj, butt, propname) :
get = obj.aesthete_properties[(propname, obj.get_aname())][1]
if (obj.get_preference(propname) == to_string(get())) :
if not butt.get_active() : obj.set_preference(propname, None)
else :
if butt.get_active() : obj.set_preference(propname, to_string(get()))
def col_to_tup_str (c) :
    return '('+str(c.red/65535.0)+','+str(c.green/65535.0)+','+str(c.blue/65535.0)+')'
def mpl_to_tuple (c) : return tuple(map(float,matplotlib.colors.colorConverter.to_rgb(c)))
font_weight_hash = {
pango.WEIGHT_ULTRALIGHT : "ultralight",\
pango.WEIGHT_LIGHT : "light",\
pango.WEIGHT_NORMAL : "normal",\
pango.WEIGHT_BOLD : "bold",\
pango.WEIGHT_ULTRABOLD : "ultrabold",\
pango.WEIGHT_HEAVY : "heavy"
}
font_style_hash = {
pango.STYLE_NORMAL : "normal",
pango.STYLE_ITALIC : "italic",
pango.STYLE_OBLIQUE : "oblique"
}
font_variant_hash = {
pango.VARIANT_NORMAL : "normal",
pango.VARIANT_SMALL_CAPS : "small-caps"
}
font_variant_bhash = {}
for k in font_variant_hash : font_variant_bhash[font_variant_hash[k]] = k
font_style_bhash = {}
for k in font_style_hash : font_style_bhash[font_style_hash[k]] = k
font_weight_bhash = {}
for k in font_weight_hash : font_weight_bhash[font_weight_hash[k]] = k
def update_combo (cmbo, lv) :
mdl = cmbo.get_model()
for row in mdl : mdl.remove(row.iter)
for v in lv : cmbo.append_text(v)
def update_combo_select(cmbo, v) :
mdl = cmbo.get_model()
for row in mdl :
if row[0] == v : cmbo.set_active_iter(row.iter)
def update_object_combo (cmbo, lv) :
mdl = cmbo.get_model()
for row in mdl : mdl.remove(row.iter)
for v in lv : mdl.append((v, get_object_from_dictionary(v).get_aname_nice()))
def font_to_mpl (label_props, val) :
fdes = pango.FontDescription(val)
label_props.set_name(fdes.get_family()); label_props.set_size(fdes.get_size()/pango.SCALE)
label_props.set_style(font_style_hash[fdes.get_style()])
label_props.set_weight(font_weight_hash[fdes.get_weight()])
label_props.set_variant(font_variant_hash[fdes.get_variant()])
def mpl_to_font (label_props) :
fdes = pango.FontDescription()
fdes.set_family(label_props.get_name()); fdes.set_size(int(label_props.get_size_in_points()*pango.SCALE))
fdes.set_style(font_style_bhash[label_props.get_style()])
fdes.set_weight(font_weight_bhash[label_props.get_weight()])
fdes.set_variant(font_variant_bhash[label_props.get_variant()])
return fdes.to_string()
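# Parse loosely formatted tuple strings, e.g. string_to_int_tup('(3, 4)') gives
# (3, 4) and string_to_float_tup('[0.5, 1.]') gives (0.5, 1.0); pieces without
# a usable number are silently skipped.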
def string_to_int_tup (string) :
if isinstance(string, tuple) :
return string
string_tuple = string.split(',')
int_list = []
for string_entry in string_tuple :
entry_match = re.match('[^\d]*([\d]+)[^\d]*', string_entry)
try:
int_list.append(int(entry_match.group(1)))
except :
pass
return tuple(int_list)
def string_to_float_tup (string) :
if isinstance(string, tuple) :
return string
string_tuple = string.split(',')
float_list = []
for string_entry in string_tuple :
entry_match = re.match('[^\d]*([\d\.]+)[^\d]*', string_entry)
try:
float_list.append(float(entry_match.group(1)))
except :
pass
return tuple(float_list)
#PROPERTIES FUNCTIONS
#Remember that the get_ should never return None
#Properties are phrased as 'name' : [change_cb, get_cb, log changes]
#NEW EASIER ROUTE:
# add property blah by creating get_blah and change_blah (without casting
# option) and write
# get_auto_aesthete_properties(self) :
# return {'blah' : (str,) }
# Aesthete should pair blah with get_blah/change_blah automatically and,
# if there isn't a name clash, should create set_blah for you. All casting
# will be done using the supplied function (here, str)
#Note casting function should cast from str to whatever you need it to be.
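# A minimal illustrative sketch of the auto-property route described above
# (the Widget class and the 'blah' property are invented for this example,
# they are not part of Aesthete):
#
#     class Widget(AObject) :
#         def get_auto_aesthete_properties(self) :
#             return { 'blah' : (float, (AOBJECT_CAN_NONE,)) }
#
# With no get_blah/change_blah defined, append_properties generates them along
# with set_blah; calling set_blah('1.5') then casts the string through float
# (None is allowed because of AOBJECT_CAN_NONE) before storing the value and
# emitting "aesthete-property-change".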
class AObject(gobject.GObject) :
aname = None
aname_root = None
aname_num = 0
aname_xml = None
# While we can add as many loggers as we desire,
# this approach allows us to expect every object
# to be logging somewhere
logger = None
logger_con = None
aesthete_properties = None
property_manager = False
property_manager_conn = None
property_store = None
method_children = None
method_window = None
view_object = False
status_id = None
row_changed_conn = -1
editable_aname_nice = True
absorber_win = None
absorber_conn = None
absorber_ann_conn = None
absorber_as = None
absorber = None
aesthete_method_widgets = None
mes = None
property_connections = None
aesthete_xml = None
# Can add this to aname_root_catalog instead of class to allow managed
# object creation (NB: creation of absorbed objects is first offered to
# absorber)
@classmethod
def aes_load_a(self, env, **parameters) :
pass
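    # For example, SympySource registers
    #     aname_root_catalog['SympySource'] = SympySource.aes_load_a
    # so that a saved document can recreate the source from its
    # aes_get_parameters().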
def get_arepr(self) :
if self.get_aesthete_xml() is not None :
return self.get_aesthete_xml()
return self.get_aname_nice()
def set_aesthete_xml(self, xml) :
self.aesthete_xml = xml
# Trigger redisplay
self.change_property('name_nice', self.get_aname_nice())
def get_aesthete_xml(self) :
return self.aesthete_xml
def print_out(self, op, pc, pn) :
op.cancel()
def __init__(self, name_root, env = None, show = True, property_manager = True, view_object = False, editable_aname_nice = True, elevate = True) :
self.add_me('aobject') # make sure name_root always in there!
self.add_me(name_root) # make sure name_root always in there!
self.aesthete_properties = {}
self.property_connections = {}
self.method_children = []
self.absorbed = []
object_dictionary = get_object_dictionary()
# PROPERTIES
gobject.GObject.__init__(self)
self.aname_root = name_root
self.view_object = view_object
self.editable_aname_nice = editable_aname_nice
# aname only valid after this statement
object_dictionary.assign(self)
#self.aesthete_properties = self.get_aesthete_properties()
#self.aesthete_properties['name_nice'] = [self.change_aname_nice, self.set_aname_nice, True]
        self.env = env
        if self.env is not None : self.logger = env.logger
if self.logger is not None : self.add_logger(self.logger)
self.init_properties(property_manager)
self.aesthete_method_widgets = []
self.method_window = gtk.VBox()
self.add_method_window(self.get_method_window())
#self.method_window.show_all()
self.method_window.show()
self.set_ui()
object_dictionary.add(self, show)
self.status_id = get_status_bar().get_context_id(self.get_aname())
self.connect('aesthete-property-change', self.aes_method_update)
if show and elevate : self.elevate()
source_action = None
def add_me(self, what) :
if self.mes == None : self.mes = [what]
else : self.mes.append(what)
def am(self, what) : return what in self.mes
def aes_remove(self):
if self.absorber != None :
self.absorber.rinse_properties(self)
global object_dictionary
object_dictionary.remove(self)
def __del__(self):
self.aes_remove()
def init_properties(self, property_manager = True) :
self.property_store = gtk.TreeStore(str, str, str, bool, bool, str, bool)
self.set_property_manager(property_manager)
self.append_properties()
self.row_changed_conn = self.property_store.connect("row-changed",\
(lambda model, path, it : \
self.change_property(\
self.property_store.get_value(it,0),\
self.property_store.get_value(it,2),\
self.property_store.get_value(it,1))))
def add_logger(self, logger) : self.connect("aesthete-logger", logger.append_line)
def add_property_connection(self, prop, handler) :
if (prop, self.get_aname()) not in self.aesthete_properties :
print "UNKNOWN PROPERTY : " + prop + " FOR " + self.get_aname_nice()
if prop not in self.property_connections :
self.property_connections[prop] = []
self.property_connections[prop].append(handler)
def change_property(self, prop, val, aname = None):
if self.absorber != None :
self.absorber.emit_property_change(prop,
self.absorber_as if aname is None else aname)
if aname == None : aname = self.get_aname()
fns = self.aesthete_properties[(prop,aname)]
cast = fns[1] if len(fns) < 4 else fns[3]
if (cast(val) == fns[1]()): return
if fns[0] :
fns[0](cast(val))
self.emit("aesthete-property-change", prop, to_string(fns[1]()), aname)
# SHOULD THIS NOW GO THROUGH ABSORBEES p_c's?
if prop in self.property_connections :
for handler in self.property_connections[prop] :
handler(val)
def emit_property_change(self, prop, aname = None):
if self.absorber != None :
self.absorber.emit_property_change(prop,
self.absorber_as if aname is None else aname)
if aname == None : aname = self.get_aname()
fns = self.aesthete_properties[(prop,aname)]
self.emit("aesthete-property-change", prop, to_string(fns[1]()), aname)
ui_merge_id = None
ui_ui_string = None
ui_action_group = None
def set_ui(self) :
pass
def get_useful_vars(self) :
return None
def get_aesthete_properties(self) : return {}
def get_property_store(self) : return self.property_store
def get_property_manager(self) : return self.property_manager
def get_aname(self) : return self.aname_root + '-' + str(self.aname_num)
def get_aname_nice(self, val=None) : return self.aname_nice if val==None else val #compat w properties
def get_aname_root(self) : return self.aname_root
def get_aname_num(self) : return self.aname_num
def get_alogger(self) : return self.logger
def get_aenv(self) : return self.env
def set_aname_num(self, name_num) : self.aname_num = name_num
# Distinguish between these two! set_aname_nice is a convenience end-user fn, change_aname_nice is the background work-a-day.
def set_aname_nice(self, name_nice) : self.change_property('name_nice', name_nice)
def change_aname_nice(self, name_nice) : self.aname_nice = name_nice; self.emit("aesthete-aname-nice-change", self.get_aname(), name_nice)
def set_property_manager(self, property_manager) :
# Should set properties page_matrix to absorber, somehow reversably
if property_manager == self.property_manager : return
if property_manager :
self.property_manager_conn = self.connect("aesthete-property-change", self.do_property_change)
else :
self.disconnect(self.property_manager_conn)
self.property_manager_conn = None
self.property_manager = property_manager
global object_dictionary; object_dictionary.set_show(self, property_manager)
def log(self, nature, string) :
self.emit("aesthete-logger", self.get_aname_nice(), nature, string)
def elevate(self, properties=True) :
get_object_dictionary().set_active(self, properties)
def update_absorbee_aname_nice(self, obj, aname, new) :
if self.row_changed_conn > 0 : self.property_store.handler_block(self.row_changed_conn)
for row in self.property_store :
if row[1] == aname and self.property_store.iter_n_children(row.iter) > 0 :
row[0] = new
#if self.property_store.iter_n_children(row.iter) > 0 : row[0] = get_object_from_dictionary(row[1]).get_aname_nice()
if self.row_changed_conn > 0 : self.property_store.handler_unblock(self.row_changed_conn)
def aes_method_update(self, other, prop, val, aname) :
if aname != self.get_aname() : return
for tup in self.aesthete_method_widgets :
if tup[0] == prop :
fns = self.aesthete_properties[(prop,aname)]
get = fns[1]
cast = get if len(fns) < 4 else fns[3]
if val is not None : tup[2](cast(val))
if tup[3] != None :
tup[3](self._get_pref_status(prop, get))
def _get_pref_status(self, prop, get) :
pref = self.get_preference(prop)
return ("off" if pref is None else ("on" if pref==to_string(get()) else "wrong"))
# These are parameters necessary to create a new object of ours, that can't
# be held back for a change_property
def aes_get_parameters(self) :
return {}
def aes_add_a(self, aname_root, **parameters) :
return None
def aes_method_preference_toggle(self, propname, get) :
butt = gtk.ToggleButton();
butt.set_relief(gtk.RELIEF_NONE)
butt_im = gtk.Image(); butt_im.set_from_stock(gtk.STOCK_JUMP_TO,
ICON_SIZE_PREFERENCE)
butt.add(butt_im); butt.set_size_request(30, 0)
butt.set_active(self.get_preference(propname) == to_string(get()))
butt.connect("toggled", lambda o : butt_adj(self, butt, propname))
butt.set_tooltip_text("Toggle storage of current value as a Preference "
"for next time")
return butt
def aes_method_colour_button(self, propname, button_text = None, preferencable = True) :
if button_text == None :
button_text = "Set " + propname
prop = self.aesthete_properties[(propname, self.get_aname())]
co_butt = gtk.ColorButton(); co_butt.set_title(button_text)
co_butt.set_color(gtk.gdk.Color(*prop[1]()))
ret = co_butt; pref_func = None
if preferencable :
co_hbox = gtk.HBox()
cop_butt = self.aes_method_preference_toggle(propname, prop[1])
co_hbox.pack_start(co_butt)
co_hbox.pack_start(cop_butt, False, False)
pref_func = cop_butt.set_active
ret = co_hbox
self.aesthete_method_widgets.append((propname, co_butt, lambda v :
co_butt.set_color(gtk.gdk.Color(*v)),
None if pref_func is None else \
lambda s : pref_func(s=='on')))
co_butt.connect("color-set", \
lambda o : self.change_property(propname, col_to_tup_str(o.get_color())))
return ret
def aes_method_font_button(self, propname, button_text = None, preferencable = True) :
if button_text == None : button_text = "Set " + propname
prop = self.aesthete_properties[(propname, self.get_aname())]
ft_butt = gtk.FontButton(); ft_butt.set_title(button_text)
ft_butt.set_font_name(prop[1]())
ret = ft_butt; pref_func = None
if preferencable :
ft_hbox = gtk.HBox()
ftp_butt = self.aes_method_preference_toggle(propname, prop[1])
ft_hbox.pack_start(ft_butt)
ft_hbox.pack_start(ftp_butt, False, False)
pref_func = ftp_butt.set_active
ret = ft_hbox
self.aesthete_method_widgets.append((propname, ft_butt, lambda v : ft_butt.set_font_name(v),
None if pref_func is None else \
lambda s : pref_func(s=='on')))
ft_butt.connect("font-set", \
lambda o : self.change_property(propname, o.get_font_name()))
ft_butt_font_labl = ft_butt.get_child().get_children()[0]
ft_butt_size_labl = ft_butt.get_child().get_children()[2]
attrs = pango.AttrList()
attrs.insert(pango.AttrScale(pango.SCALE_SMALL, 0, -1))
for labl in (ft_butt_font_labl, ft_butt_size_labl) :
labl.set_attributes(attrs)
ft_butt_font_labl.set_ellipsize(pango.ELLIPSIZE_END)
return ret
# Does nothing on selection!
def aes_method_object_combo(self, propname) :
prop = self.aesthete_properties[(propname, self.get_aname())]
en_lsto = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
en_cmbo = gtk.ComboBox(en_lsto)
en_cllr = gtk.CellRendererText(); en_cmbo.pack_start(en_cllr)
en_cllr.props.ellipsize = pango.ELLIPSIZE_END
en_cmbo.add_attribute(en_cllr, 'text', 1)
update_object_combo(en_cmbo, prop[1]())
self.aesthete_method_widgets.append((propname, en_cmbo, lambda v : update_object_combo(en_cmbo, v), None))
return en_cmbo
def aes_method_automate_combo(self, combo, propname, col) :
self.aesthete_method_widgets.append((propname, combo, lambda v : update_combo_select(combo, v), None))
lsto = combo.get_model()
combo.connect("changed",
lambda o : None if o.get_active_iter() is None else self.change_property(propname,
lsto.get(o.get_active_iter(),
col)[0]))
def aes_method_automate_combo_text(self, combo, propname) :
self.aesthete_method_widgets.append((propname, combo, lambda v : update_combo_select(combo, v), None))
combo.connect("changed", \
lambda o : self.change_property(propname, o.get_active_text()))
def aes_method_combo(self, propname, preferencable = True) :
prop = self.aesthete_properties[(propname, self.get_aname())]
en_cmbo = gtk.combo_box_new_text()
#en_cmbo.set_size_request(30,-1)
en_cmbo[0].props.ellipsize = pango.ELLIPSIZE_END
update_combo(en_cmbo, prop[1]())
ret = en_cmbo; pref_func = None
if preferencable :
en_hbox = gtk.HBox()
enp_butt = self.aes_method_preference_toggle(propname, prop[1])
en_hbox.pack_start(en_cmbo)
en_hbox.pack_start(enp_butt, False, False)
pref_func = enp_butt.set_active
ret = en_hbox
self.aesthete_method_widgets.append((propname, en_cmbo, lambda v : update_combo(en_cmbo, v),
None if pref_func is None else \
lambda s : pref_func(s=='on')))
return ret
# Automatic update
def aes_method_entry(self, propname, has_frame = False, preferencable = True, wait_until_parsable_float = False) :
prop = self.aesthete_properties[(propname, self.get_aname())]
en_entr = gtk.Entry(); en_entr.set_has_frame(has_frame)
en_entr.set_text(to_string(prop[1]()))
ret = en_entr; pref_func = None
if preferencable :
en_hbox = gtk.HBox()
enp_butt = self.aes_method_preference_toggle(propname, prop[1])
en_hbox.pack_start(en_entr)
en_hbox.pack_start(enp_butt, False, False)
pref_func = enp_butt.set_active
ret = en_hbox
self.aesthete_method_widgets.append((propname, en_entr, lambda v :
en_entr.set_text(to_string(v)),
None if pref_func is None else \
lambda s : pref_func(s=='on')))
if wait_until_parsable_float :
en_entr.connect("changed", \
lambda o : self.change_property(propname, en_entr.get_text()) if check_float(en_entr.get_text()) else 0)
else :
en_entr.connect("changed", \
lambda o : self.change_property(propname, en_entr.get_text()))
return ret
# Manual (button) update
def aes_method_tuple_entry_update(self, propname, button_text=None, preferencable=True) :
prop = self.aesthete_properties[(propname, self.get_aname())]
if button_text is not None :
eu_butt = gtk.Button(button_text)
if preferencable :
tup_len = len(prop[1]())
eu_entr = AesPrefTupleEntry(propname, prop[1], lambda :
self.get_preference(propname), tup_len)
eu_entr.set_preference_status(\
self._get_pref_status(propname, prop[1]))
#eu_butt.connect_after("clicked", lambda o : \
# eu_entr.set_preferenceable(eu_entr.get_text()==str(prop[1]())))
eu_entr.connect("set-event",
lambda o : self.change_property(propname, eu_entr.get_text()))
eu_entr.connect("preference-toggled", lambda o : aespref_adj(self, eu_entr, propname))
if button_text is not None :
eu_butt.connect("focus-in-event", lambda o, e :\
eu_entr.set_override_show(True))
eu_butt.connect("focus-out-event", lambda o, e :\
eu_entr.set_override_show(False))
pref_func = eu_entr.set_preference_status
else :
eu_entr = gtk.Entry()
pref_func = None
eu_entr.set_size_request(30,-1)
eu_entr.set_text(to_string(prop[1]()))
if button_text is not None :
eu_hbox = gtk.HBox()
ret = eu_hbox
eu_butt.connect("clicked", \
lambda o : self.change_property(propname, to_string(eu_entr.get_text())))
# Pack end to allow easy label addition
eu_hbox.pack_end(eu_butt); eu_hbox.pack_end(eu_entr)
else :
ret = eu_entr
self.aesthete_method_widgets.append((propname, eu_entr, lambda v :
eu_entr.set_text(to_string(v)),
pref_func))
return ret
# Manual (button) update
def aes_method_entry_update(self, propname, button_text = None, preferencable = True) :
prop = self.aesthete_properties[(propname, self.get_aname())]
if button_text is not None :
eu_butt = gtk.Button(button_text)
if preferencable :
eu_entr = AesPrefEntry(propname, prop[1], lambda :
self.get_preference(propname))
eu_entr.set_preference_status(\
self._get_pref_status(propname, prop[1]))
#eu_butt.connect_after("clicked", lambda o : \
# eu_entr.set_preferenceable(eu_entr.get_text()==str(prop[1]())))
eu_entr.connect("set-event",
lambda o : self.change_property(propname, to_string(eu_entr.get_text())))
eu_entr.connect("preference-toggled", lambda o : aespref_adj(self, eu_entr, propname))
if button_text is not None :
eu_butt.connect("focus-in-event", lambda o, e :\
eu_entr.set_override_show(True))
eu_butt.connect("focus-out-event", lambda o, e :\
eu_entr.set_override_show(False))
pref_func = eu_entr.set_preference_status
else :
eu_entr = gtk.Entry()
pref_func = None
eu_entr.set_size_request(30,-1)
eu_entr.set_text(to_string(prop[1]()))
if button_text is not None :
eu_hbox = gtk.HBox()
ret = eu_hbox
eu_butt.connect("clicked", \
lambda o : self.change_property(propname, eu_entr.get_text()))
# Pack end to allow easy label addition
eu_hbox.pack_end(eu_butt); eu_hbox.pack_end(eu_entr)
else :
ret = eu_entr
self.aesthete_method_widgets.append((propname, eu_entr, lambda v :
eu_entr.set_text(to_string(v)),
pref_func))
return ret
def aes_method_toggle_button(self, propname, label = None,
preferencable = True, onoff = None) :
check = gtk.ToggleButton(label=label)
prop = self.aesthete_properties[(propname, self.get_aname())]
ret = check; pref_func = None
if preferencable :
ck_hbox = gtk.HBox()
ckp_butt = self.aes_method_preference_toggle(propname, prop[1])
ck_hbox.pack_start(check)
ck_hbox.pack_start(ckp_butt, False, False)
pref_func = ckp_butt.set_active
ret = ck_hbox
self.aesthete_method_widgets.append((propname, check, lambda v : check.set_active(v),
None if pref_func is None else \
lambda s : pref_func(s=='on')))
check.set_active(prop[1]())
check.connect("toggled", \
lambda o : self.change_property(propname, str(check.get_active())))
if onoff is not None :
text = label + " : " if label is not None else ""
update_label = \
lambda o : check.set_label(text + onoff[\
0 if check.get_active() else 1])
check.connect("toggled", update_label)
update_label(check)
return ret
def aes_method_check_button(self, propname, label = None, preferencable = True) :
check = gtk.CheckButton(label)
prop = self.aesthete_properties[(propname, self.get_aname())]
ret = check; pref_func = None
if preferencable :
ck_hbox = gtk.HBox()
ckp_butt = self.aes_method_preference_toggle(propname, prop[1])
ck_hbox.pack_start(check)
ck_hbox.pack_start(ckp_butt, False, False)
pref_func = ckp_butt.set_active
ret = ck_hbox
self.aesthete_method_widgets.append((propname, check, lambda v : check.set_active(v),
None if pref_func is None else \
lambda s : pref_func(s=='on')))
check.set_active(prop[1]())
check.connect("toggled", \
lambda o : self.change_property(propname, str(check.get_active())))
return ret
# Doesn't get ancestors!!
def get_all_aesthete_properties(self) :
props = self.get_aesthete_properties()
if hasattr(self, 'get_auto_aesthete_properties') :
auto_props = self.get_auto_aesthete_properties()
for name in auto_props.keys() :
change = 'change_'+name
if not hasattr(self, change) :
change = None
else :
change = self.__getattribute__(change)
get = 'get_'+name
if not hasattr(self, get) :
get = None
else :
get = self.__getattribute__('get_'+name)
props[name] = [change,
get,
True]+\
list(auto_props[name])
if props[name][3] == tuple :
props[name][3] = cast_to_tup
if props[name][3] == bool :
props[name][3] = cast_to_bool
if len(auto_props[name]) > 1 and \
AOBJECT_CAN_NONE in auto_props[name][1] :
props[name][3] = make_cast_can_none(props[name][3])
props['name_nice'] = [self.change_aname_nice if self.editable_aname_nice else None, self.get_aname_nice, True]
return props
    def get_preference(self, prop, aname_root=None) :
        if aname_root is None :
            aname_root = self.get_aname_root()
        return get_preferencer().get_preference(aname_root, prop)
def set_preference(self, prop, val = None, aname=None, aname_root=None) :
if aname_root is None :
aname_root = self.get_aname_root()
if aname is None :
aname = self.get_aname()
get_preferencer().set_preference(aname_root, prop, val)
self.do_property_change(None, prop, None, aname)
self.aes_method_update(None, prop, None, aname)
def append_properties(self, props = None, aname = None) :
parent = None
if props == None :
props = self.get_all_aesthete_properties()
if aname == None :
aname = self.get_aname()
elif aname != self.get_aname() :
obj = get_object_from_dictionary(aname)
if self.row_changed_conn > 0 : self.property_store.handler_block(self.row_changed_conn)
parent = self.property_store.append(None, [obj.get_aname_nice(), aname, '', False, True, aname, False])
if self.row_changed_conn > 0 : self.property_store.handler_unblock(self.row_changed_conn)
for name in props :
get = props[name][1]
change = props[name][0]
cast = get if len(props[name]) < 4 else props[name][3]
if not hasattr(self, 'set_'+name) :
self.__dict__['set_'+name] = make_set(self, name)
can_none = len(props[name]) > 4 and AOBJECT_CAN_NONE in props[name][4]
no_change = len(props[name]) != 4 and (len(props[name]) < 4 or\
AOBJECT_NO_CHANGE in props[name][4])
if get is None :
if not hasattr(self, name) :
if can_none :
self.__dict__[name] = None
else :
self.__dict__[name] = cast('')
self.__dict__['get_'+name] = make_get(self, name, can_none)
get = self.__getattribute__('get_'+name)
props[name][1] = get
if change is None and not no_change :
self.__dict__['change_'+name] = make_change(self, name)
change = self.__getattribute__('change_'+name)
props[name][0] = change
self.aesthete_properties[(name,aname)] = props[name]
pref = None
if (change!=None) :
pref = self.get_preference(name)
if pref != None : change(cast(pref)); pref = cast(pref)
val = get()
if self.row_changed_conn > 0 : self.property_store.handler_block(self.row_changed_conn)
self.property_store.append(parent, [name, aname, val, (change!=None), (change==None), name + ' [' + aname + ']',\
pref==val])
if self.row_changed_conn > 0 : self.property_store.handler_unblock(self.row_changed_conn)
if self.absorber is not None :
self.absorber.append_properties(props,
None if self.absorber_as_self else aname)
def aes_append_status(self, other, string) :
status_bar = get_status_bar()
status_bar.push(self.status_id, "[" + self.get_aname_nice() + "] " + string)
def display_plotwidget(self, other, string):
self.push(self.plotwidget_cid, "[Plotter] " + string)
def do_property_change_under_(self, iter, prop, val, aname) :
for row in iter :
if row[0] == prop and row[1] == aname :
is_pref = self.get_preference(prop) == row[2]
if row[6] != is_pref : row[6] = is_pref
#if prop == 'yhide_oom' : print str(row[6]) + str(is_pref)
if val != None :
row[2] = val
if self.aesthete_properties[(prop,aname)][2] :
self.log(2, '[' + prop + '] set to [' + val + ']')
return True
return False
def do_property_change(self, other, prop, val, aname = None):
if aname == None : aname = self.get_aname()
done = self.do_property_change_under_(self.property_store, prop, val, aname)
if not done :
for row in self.property_store :
if self.property_store.iter_n_children(row.iter) > 0 and row[1] == aname :
self.do_property_change_under_(row.iterchildren(), prop, val, aname)
absorbed = None
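    # Property absorption: an absorbee hands its properties, change signals and method
    # window over to this object (registered under this object's aname when as_self is
    # True); rinse_properties() below reverses the process.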
def absorb_properties(self, absorbee, as_self = True) :
if self.absorber is not None :
self.absorber.absorb_properties(absorbee, as_self=as_self)
return
absorbee.set_property_manager(False)
absorbee.absorber_conn = absorbee.connect("aesthete-property-change", self.do_property_change)
absorbee.absorber_ann_conn = absorbee.connect("aesthete-aname-nice-change", self.update_absorbee_aname_nice)
absorbee.absorber_as = self.get_aname() if as_self else absorbee.get_aname()
absorbee.absorber_as_self = as_self
absorbee.absorber = self
#FIXME: is this right?
absorbee.property_store.disconnect(absorbee.row_changed_conn)
absorbee.row_changed_conn = -1
self.append_properties(absorbee.get_all_aesthete_properties(), absorbee.absorber_as)
new_win = absorbee.get_method_windows()
absorbee.absorber_win = new_win
if new_win.get_parent() : new_win.get_parent().remove(new_win)
self.add_method_window(new_win)
#self.method_window.show_all()
self.method_window.show()
for absabs in absorbee.absorbed :
absorbee.rinse_properties(absabs)
self.absorb_properties(absabs, as_self=absabs.absorber_as_self)
self.absorbed.append(absorbee)
def rinse_properties(self, absorbee) :
absorbee.disconnect(absorbee.absorber_conn)
absorbee.disconnect(absorbee.absorber_ann_conn)
for absabs in absorbee.absorbed :
self.rinse_properties(absabs)
for prop in absorbee.get_all_aesthete_properties() :
aname = self.get_aname() \
if absorbee.absorber_as_self else\
absorbee.get_aname()
del self.aesthete_properties[(prop,aname)]
for row in self.property_store :
if row[0] == prop and row[1] == absorbee.absorber_as :
self.property_store.remove(row.iter)
for row in self.property_store :
if row[1] == absorbee.get_aname() and row.iterchildren() != None :
self.property_store.remove(row.iter)
self.remove_method_window(absorbee.absorber_win)
absorbee.absorber = None
absorbee.absorber_win = None
absorbee.absorber_as = None
absorbee.absorber_as_self = None
absorbee.absorber_conn = -1
absorbee.set_property_manager(True)
self.log(2, absorbee.get_aname_nice() + ' rinsed')
self.absorbed.remove(absorbee)
if self.absorber is not None :
self.absorber.rinse_properties(absorbee)
def get_new_property_view(self):
if not self.property_manager : return None
property_prop_rend = gtk.CellRendererText(); property_prop_col = gtk.TreeViewColumn('Property', property_prop_rend)
property_prop_col.add_attribute(property_prop_rend, 'text', 0); property_prop_col.set_expand(True)
property_val_rend = gtk.CellRendererText()
property_val_col = gtk.TreeViewColumn('Value', property_val_rend)
property_pref_rend = gtk.CellRendererToggle()
property_pref_col = gtk.TreeViewColumn('Preference', property_pref_rend)
property_val_rend.set_property('foreground', '#AAAAAA')
property_val_col.add_attribute(property_val_rend, 'text', 2);
property_val_col.add_attribute(property_val_rend, "editable", 3)
property_val_col.add_attribute(property_val_rend, "foreground-set", 4)
property_val_rend.connect('edited', \
(lambda cell, path, new : (self.property_store.set_value(self.property_store.get_iter(path),2,new))))
property_val_col.set_expand(True)
property_pref_col.add_attribute(property_pref_rend, 'active', 6)
property_pref_rend.connect('toggled', \
(lambda cell, path : self.set_preference(self.property_store.get_value(self.property_store.get_iter(path),0), \
self.property_store.get_value(self.property_store.get_iter(path),2) if (not cell.get_active()) else None)))
property_view = gtk.TreeView(self.property_store)
property_view.append_column(property_prop_col); property_view.append_column(property_val_col)
property_view.append_column(property_pref_col)
property_view.set_tooltip_column(5)
property_view.show_all()
swin = gtk.ScrolledWindow()
swin.add(property_view)
swin.show()
return swin
# Whatever this returns should have a save_preferences method
# such as a PreferencesTableMaker table
def get_preferences_window(self) :
return None
def get_method_window(self) :
return None
def remove_method_window(self, win) :
if win : self.method_window.remove(win); self.method_window.show()
def add_method_window(self, win) :
if win : self.method_window.pack_start(win, False, False); self.method_window.show()#; self.method_window.show()
def get_method_windows(self) :
return self.method_window
class ObjectDictionary(gobject.GObject) :
__gsignals__ = { "add" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ( gobject.TYPE_STRING, gobject.TYPE_STRING )),
"remove" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ( gobject.TYPE_STRING, gobject.TYPE_STRING )) }
dictionary = {}
to_remove = {}
page_matrix = {}
liststores = {}
view_page_matrix = {}
notebook = gtk.Notebook()
view_notebook = gtk.Notebook()
root_counts = {}
props_butts = {}
icon_size = None
concise = False
def __init__ (self) :
gobject.GObject.__init__(self)
self.notebook.set_tab_pos(gtk.POS_LEFT)
self.notebook.set_property("scrollable", True)
self.notebook.set_property("enable-popup", True)
self.view_notebook.set_property("scrollable", True)
self.view_notebook.set_property("enable-popup", True)
self.view_notebook.connect("switch-page", self.do_view_change_current_page)
self.icon_size = gtk.icon_size_register("mini", 2, 2)
self.useful_vars = gtk.ListStore(gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_STRING)
selected_source = None
def selected_source_change(self, aname) :
if aname is None :
return
self.selected_source = aname
self.emit('aesthete-selected-source-change')
def try_active_source_action(self) :
active_view = self.active_view
if not self.selected_source or not active_view or \
not active_view.source_action :
return
active_view.source_action(self.selected_source)
#active_view.grab_focus()
active_view = None
def do_view_change_current_page(self, nb, page, pn) :
obj = nb.get_nth_page(pn)
po = self.page_matrix[obj.get_aname()]
ppn = self.notebook.page_num(po)
if ppn != -1 : self.notebook.set_current_page(ppn)
old_view = self.active_view
if old_view is not None and old_view.ui_merge_id is not None :
ui_manager.remove_ui(old_view.ui_merge_id)
if old_view.ui_action_group is not None :
ui_manager.remove_action_group(old_view.ui_action_group)
if obj.ui_action_group is not None :
ui_manager.insert_action_group(obj.ui_action_group)
obj.ui_merge_id = \
ui_manager.add_ui_from_string(obj.ui_ui_string)
#Not necessary while tabs do not accept focus
#obj.grab_focus()
self.active_view = obj
def get_notebook(self) : return self.notebook
def get_viewnotebook(self) : return self.view_notebook
def assign(self, obj):
name_root = obj.get_aname_root()
if name_root in self.root_counts : self.root_counts[name_root] += 1
else : self.root_counts[name_root] = 1
name_num = self.root_counts[name_root]
#for oobj_name in self.dictionary :
# oobj = self.dictionary[oobj_name]
# if oobj.get_aname_root() == name_root and oobj.get_aname_num() >= name_num :
# name_num = oobj.get_aname_num() + 1
obj.set_aname_num(name_num)
aname = obj.get_aname()
aname_nice = obj.aname_root
if name_num > 1 : aname_nice += ' (' + str(name_num) + ')'
obj.aname_nice = aname_nice
self.dictionary[aname] = obj
def add(self, obj, show):
aname = obj.get_aname()
if aname == None : self.assign(obj)
view = obj.get_new_property_view()
self.page_matrix[aname] = None
self.set_show(obj, show)
obj.connect("aesthete-aname-nice-change", self.update_label)
obj.connect("aesthete-aname-nice-change", self.update_liststores)
lss = set(self.liststores.keys()) & set(obj.mes)
for ls in lss :
ls = self.liststores[ls]
ls.append((obj.get_aname(), obj.get_aname_nice(),
obj.get_aesthete_xml()))
useful_vars = obj.get_useful_vars()
if useful_vars is not None :
for var in useful_vars :
self.useful_vars.append((obj.get_aname(), var, useful_vars[var]))
self.emit("add", aname, obj.get_aname_root())
def update_liststores(self, obj, aname, aname_nice) :
lss = set(self.liststores.keys()) & set(obj.mes)
for ls in lss :
ls = self.liststores[ls]
for row in ls :
if row[0] == aname :
row[1] = aname_nice
row[2] = obj.get_aesthete_xml()
def update_label(self, obj, aname, aname_nice) :
page = self.page_matrix[aname]
if page != None :
self.props_butts[aname].get_children()[0].set_text(obj.get_aname_nice())
self.notebook.set_menu_label_text(page, obj.get_aname_nice())
#page.get_children()[1].set_label(obj.get_aname_nice()+' Toolbox')
# self.notebook.set_tab_label(page, self.make_label(obj))
if obj.view_object and self.view_notebook.page_num(obj) >= 0 :
self.view_notebook.set_tab_label(obj, self.make_label(obj))
def remove_widgets(self, obj) :
aname = obj.get_aname()
page = self.page_matrix[aname]
if page != None :
method_frame = page.get_children()[1]
method_frame.remove(method_frame.get_children()[0])
self.notebook.remove(page); self.page_matrix[aname] = None
if obj.view_object and self.view_notebook.page_num(obj) >= 0 :
self.view_notebook.remove(obj)
def make_label(self, obj) :
label = gtk.Label(obj.get_aname_nice()); label.set_tooltip_text(obj.get_aname())
return label
def show_props(self, props, show) :
if show : props.show()
else : props.hide()
def _make_full_label(self, obj) :
label = self.make_label(obj)
full_label = gtk.HBox()
full_label.pack_start(label, True, True)
self.props_butts[obj.get_aname()] = full_label
killv = gtk.VBox()
kill = gtk.Button()
kill.set_relief(gtk.RELIEF_NONE)
kill_im = gtk.Image(); kill_im.set_from_stock(gtk.STOCK_CLOSE, self.icon_size)
kill.add(kill_im); kill.set_size_request(15, 15)
kill.connect("clicked", lambda o : obj.aes_remove())
killv.pack_start(kill, False)
full_label.pack_start(killv, False)
full_label.show_all()
return full_label
def set_show(self, obj, show):
aname = obj.get_aname()
if aname not in self.page_matrix : return
page = self.page_matrix[aname]
if not show : self.remove_widgets(obj)
if show and page == None :
prop_view = obj.get_new_property_view()
prop_view.hide()
meth_view = gtk.Frame()#obj.get_aname_nice()+" Toolbox")
meth_view.set_shadow_type(gtk.SHADOW_NONE)
meth_view.add(obj.get_method_windows())
#meth_view = obj.get_methods_windows()
button_row = gtk.HButtonBox()
button_row.set_layout(gtk.BUTTONBOX_START)
show_props = gtk.ToggleButton()
show_props_im = gtk.Image(); show_props_im.set_from_stock(gtk.STOCK_EDIT, gtk.ICON_SIZE_SMALL_TOOLBAR)
show_props.add(show_props_im)
show_props.connect("toggled", lambda o : self.show_props(prop_view, o.get_active()))
show_props.show_all()
button_row.pack_start(show_props, False)
if not self.concise : button_row.show()
view = gtk.VBox(); view.pack_start(prop_view); view.pack_start(meth_view); view.pack_start(button_row, False)
if view :
full_label = self._make_full_label(obj)
self.page_matrix[aname] = view
notebook_child = self.notebook.append_page(view, full_label)
self.notebook.set_menu_label_text(view, obj.get_aname_nice())
#meth_view.show_all()
meth_view.show()
view.show()
if show and obj.view_object and self.view_notebook.page_num(obj) == -1 :
full_label = self._make_full_label(obj)
pn = self.view_notebook.append_page(obj, full_label)
full_label.get_parent().set_property('can-focus', False)
self.view_notebook.set_tab_detachable(obj, True)
obj.show()
self.view_notebook.set_current_page(pn)
if show and page is not None :
pn = self.notebook.page_num(page)
self.notebook.set_current_page(pn)
def set_active(self, obj, properties = True) :
if obj.view_object :
pn = self.view_notebook.page_num(obj)
self.view_notebook.set_current_page(pn)
if properties :
po = self.page_matrix[obj.get_aname()]
ppn = self.notebook.page_num(po)
if ppn != -1 : self.notebook.set_current_page(ppn)
def remove(self, obj):
aname = obj.get_aname()
self.remove_widgets(obj)
if self.selected_source == aname :
self.selected_source = None
lss = set(self.liststores.keys()) & set(obj.mes)
for ls in lss :
ls = self.liststores[ls]
for row in ls :
if row[0] == aname :
ls.remove(row.iter)
for row in self.useful_vars :
if row[0] == aname :
self.useful_vars.remove(row.iter)
self.to_remove[aname] = obj
del self.dictionary[aname]
self.emit("remove", aname, obj.get_aname_root())
del self.to_remove[aname]
def get_objects_by_am(self, what) :
if what == '' or what == None : return []
objs = []
for key in self.dictionary :
obj = self.dictionary[key]
if obj.am(what) : objs.append(obj)
return objs
def get_liststore_by_am(self, what):
if what in self.liststores : return self.liststores[what]
ls = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING,
gobject.TYPE_PYOBJECT)
self.liststores[what] = ls
for key in self.dictionary :
obj = self.dictionary[key]
if obj.am(what) : ls.append((obj.get_aname(),
obj.get_aname_nice(),
obj.get_aesthete_xml()))
return ls
def set_concise_notebook (self, concise) :
self.concise = concise
self.notebook.set_show_tabs(not concise)
for p in self.notebook.get_children() :
c = p.get_children()
#c[1].get_label_widget().hide() if concise else c[1].get_label_widget().show()
c[2].hide() if concise else c[2].show()
status_bar = gtk.Statusbar()
def get_status_bar() : return status_bar
object_dictionary = ObjectDictionary()
ui_manager = gtk.UIManager()
def get_object_dictionary():
return object_dictionary
def get_object_from_dictionary(name):
if name == '' or name == None : return None
if not name in object_dictionary.dictionary : return None
return object_dictionary.dictionary[name]
def get_removing_from_dictionary(name) :
if name == '' or name == None : return None
if not name in object_dictionary.to_remove : return None
return object_dictionary.to_remove[name]
def get_active_object() :
nb = object_dictionary.get_viewnotebook()
return nb.get_children()[nb.get_current_page()]
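# Preferences are stored in an XML file (seeded from preferences.default.xml on first
# run) as <body>/<aname_root>/<prop value="..."> entries; write_any_preferences()
# flushes the tree back to disk once something has changed.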
class Preferencer :
tree = None
preferences_file = paths.get_user_location() + "preferences.xml"
default_file = paths.get_share_location()+"preferences.default.xml"
pref_change = False
def __init__ (self) :
if not os.path.exists(self.preferences_file) :
try :
shutil.copyfile(self.default_file,
self.preferences_file)
except :
root = ET.Element("preferences")
head = ET.SubElement(root, "head")
program = ET.SubElement(head, "program")
program.set("name", details.get_name())
program.set("version", details.get_version())
program.set("description", details.get_description())
body = ET.SubElement(root, "body")
tree = ET.ElementTree(root)
tree.write(self.preferences_file)
self.tree = tree
if self.tree is None :
self.tree = ET.parse(self.preferences_file)
self.body = self.tree.find("body")
def write_any_preferences(self) :
if (self.pref_change) :
self.tree.write(self.preferences_file)
print "Preferences saved"
def get_preference (self, aroot, prop) :
pref = self.body.find(aroot + '/' + prop)
if pref is None : return None
return pref.get("value")
def set_preference (self, aroot, prop, val = None) :
prop = to_string(prop)
if val != None :
pe = self.body.find(aroot + '/' + prop)
if pe is None :
ao = None
for el in list(self.body) :
if el.tag == aroot : ao = el
if ao == None : ao = ET.SubElement(self.body, aroot)
pe = ET.SubElement(ao, prop)
pe.set("value", to_string(val))
else :
ao = self.body.find(aroot)
pe = self.body.find(aroot + '/' + prop)
if pe is not None :
ao.remove(pe)
if len(list(ao)) == 0 : self.body.remove(ao)
self.pref_change = True
preferencer = Preferencer()
def get_preferencer() : return preferencer
def new_tab_win(source, page, x, y) :
win = gtk.Window()
nb = gtk.Notebook()
win.add(nb)
win.show_all()
return nb
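# XML (de)serialisation of the object graph: aobject_to_xml() records an AObject's
# parameters, absorbed children and properties; save_state()/open_state() walk the
# graph via the get_before/got_before name queues so that referenced AObjects are
# written out and restored before the objects that depend on them.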
def aobject_to_xml(get_before, obj=None) :
if obj is None :
obj = get_active_object()
root = ET.Element(obj.get_aname_root())
aname = obj.get_aname()
root.set('name', aname)
root.set('type', obj.get_aname_root())
params = obj.aes_get_parameters()
if len(params) > 0 :
parameters_root = ET.SubElement(root, "parameters")
for p in params.keys() :
param = ET.SubElement(parameters_root, "parameter")
param.set("name", p)
if isinstance(params[p], AObject) :
to_get = params[p]
while to_get.absorber is not None :
to_get = to_get.absorber
get_before.append(params[p].get_aname())
param.set("aobject", params[p].get_aname())
else :
param.set("value", to_string(params[p]))
if len(obj.absorbed) > 0 :
absorbed_root = ET.SubElement(root, "absorbees")
for absorbee in obj.absorbed :
absorbee_node = aobject_to_xml(get_before, absorbee)
absorbee_node.set('as_self', to_string(absorbee.absorber_as_self))
absorbed_root.append(absorbee_node)
aes_xml = obj.get_aesthete_xml()
if aes_xml is not None :
aes_xml_root = ET.SubElement(root, "aes")
aes_xml_root.append(aes_xml.getroot())
prop_root = ET.SubElement(root, 'properties')
anames = {}
for name, aname in obj.aesthete_properties.keys() :
prop = obj.aesthete_properties[(name, aname)]
if aname not in anames :
anames[aname] = []
prop_node = ET.Element('property')
prop_node.set('name', name)
val = prop[1]()
if val is None :
continue
if isinstance(val, AObject) :
prop_node.set('aobject', val.get_aname())
else :
prop_node.set('value', to_string(prop[1]()))
anames[aname].append(prop_node)
for aname in anames.keys() :
aname_node = ET.SubElement(prop_root, "object")
aname_node.set("name", aname)
for prop_node in anames[aname] :
aname_node.append(prop_node)
return root
def save_state(uri) :
gf = gio.File(uri=uri)
f = gf.replace('', False)
obj = get_active_object()
get_before = [obj.get_aname()]
root = ET.Element("aobjects")
while len(get_before) > 0 :
get_obj = get_object_from_dictionary(get_before[0])
aobject_root = aobject_to_xml(get_before, get_obj)
if get_obj.get_aname() in get_before :
get_before.remove(get_obj.get_aname())
root.insert(0, aobject_root)
tree = ET.ElementTree(root)
tree.write(f, pretty_print=True)
f.close()
def xml_to_aobject(got_before, root, parent=None, env=None) :
aname_root = root.get('type')
parameters_root = root.find('parameters')
params = {}
if parameters_root is not None :
for parameter_node in parameters_root :
aname = parameter_node.get('aobject')
if aname is not None and aname in got_before.keys() :
params[parameter_node.get('name')] = \
get_object_from_dictionary(got_before[aname])
else :
params[parameter_node.get('name')] = \
parameter_node.get("value")
obj = None
if parent is not None :
obj = parent.aes_add_a(aname_root, **params)
if obj is None :
obj = aname_root_catalog[aname_root](env=env, **params)
aname_old = root.get('name')
got_before[aname_old] = obj.get_aname()
absorbed_root = root.find('absorbees')
absorbee_as_self = {}
if absorbed_root is not None :
for absorbee_node in absorbed_root :
absorbee = xml_to_aobject(got_before, absorbee_node, parent=obj, env=env)
got_before[absorbee_node.get('name')] = absorbee.get_aname()
as_self = cast_to_bool(absorbee_node.get('as_self'))
absorbee_as_self[absorbee_node.get('name')] = as_self
if absorbee not in obj.absorbed :
obj.absorb_properties(absorbee, as_self=as_self)
prop_root = root.find('properties')
for aname_node in prop_root :
child_aname_old = aname_node.get('name')
for prop_node in aname_node :
if child_aname_old in absorbee_as_self and absorbee_as_self[child_aname_old] :
aname = aname_old
else :
aname = got_before[child_aname_old]
prop_aname = prop_node.get('aname')
if prop_aname is not None and prop_aname in got_before.keys() :
val = get_object_from_dictionary(got_before[aname])
else :
val = prop_node.get('value')
obj.change_property(prop_node.get('name'),
val,
aname=aname)
aes_xml = root.find('aes')
if aes_xml is not None :
obj.set_aesthete_xml(ET.ElementTree(aes_xml[0]))
return obj
def open_state(uri, env) :
gf = gio.File(uri=uri)
f = AesFile(gf)
tree = ET.parse(f)
f.close()
root = tree.getroot()
got_before = {}
for aobject_node in root :
obj = xml_to_aobject(got_before, aobject_node, env=env)
gtk.notebook_set_window_creation_hook(new_tab_win)
gobject.signal_new("aesthete-logger", AObject, gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_STRING,gobject.TYPE_INT,gobject.TYPE_STRING,))
gobject.signal_new("aesthete-aname-nice-change", AObject, gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_STRING, gobject.TYPE_STRING,))
gobject.signal_new("aesthete-property-change", AObject, gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING))
gobject.signal_new("aesthete-selected-source-change", ObjectDictionary, gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ())
aname_root_catalog = {} | Aesthete | /Aesthete-0.4.2.tar.gz/Aesthete-0.4.2/aobject/aobject.py | aobject.py |
# Aesthetics
This repo provides:
* Fisher Vector implementation
* AVA (Image Aesthetic Visual Analysis) dataset and fast multi-threaded downloader
# Downloader
```sh
./download.py --help
./download.py ava
```
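# Fisher Vector usage
A minimal sketch of the Fisher Vector training pipeline (the folder name and `K` are illustrative; each sub-folder of the input folder is treated as one class):
```python
from aesthetics.fisher import Gmm, FisherVector

gmm = Gmm(K=5)
gmm.generate(input_folder='images', limit=50)          # train the GMM vocabulary
fisher_vector = FisherVector(gmm)
features = fisher_vector.features('images', limit=50)  # dict: sub-folder -> fisher vectors
```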
<!--# Downloaded dataset-->
<!--ECCV 2016:-->
<!--* [Dataset Link (2 GB)](dataset_link)-->
<!--* [Dataset 256x256 size cropped preview (132 MB)](dataset_preview_link)-->
<!--[dataset_link]: https://drive.google.com/open?id=0BxeylfSgpk1MN1hUNHk1bDhYRTA-->
<!--[dataset_preview_link]: https://drive.google.com/open?id=0BxeylfSgpk1MU2RsVXo3bEJWM2c-->
| Aesthetics | /Aesthetics-0.1.1.tar.gz/Aesthetics-0.1.1/README.txt | README.txt |
import glob
import numpy as np
import os
import tqdm
from collections import OrderedDict
from concurrent.futures import ProcessPoolExecutor
from scipy.stats import multivariate_normal
class FisherVector(object):
"""
Fisher Vector implementation using cv2 v3.2.0+ and python3.
Key Methods:
* fisher_vector_of_file(self, filename): Returns the fisher vector for given image file
* get_fisher_vectors_from_folder(self, folder, limit): Returns fisher vectors for all images in given folder
* features(self, folder, limit): Returns fisher vectors for all images in subfolders of given folder
References used below:
[1]: Image Classification with the Fisher Vector: https://hal.inria.fr/file/index/docid/830491/filename/journal.pdf
[2]: http://www.vlfeat.org/api/gmm-fundamentals.html
"""
def __init__(self, gmm):
"""
:param aesthetics.fisher.Gmm gmm: Trained gmm to be used
"""
self.gmm = gmm
def features(self, folder, limit):
"""
:param str folder: Folder Name
:param int limit: Number of images to read from each subfolder
:return: fisher vectors for images in each subfolder of given folder
:rtype: np.array
"""
folders = sorted(glob.glob(folder + "/*"))
features = OrderedDict([(f, self.get_fisher_vectors_from_folder(f, limit)) for f in folders])
return features
def get_fisher_vectors_from_folder(self, folder, limit):
"""
:param str folder: Folder Name
:param int limit: Number of images to read from each folder
:return: fisher vectors for images in given folder
:rtype: np.array
"""
files = glob.glob(folder + "/*.jpg")[:limit]
with ProcessPoolExecutor() as pool:
futures = pool.map(self._worker, files)
desc = 'Creating Fisher Vectors {} images of folder {}'.format(len(files), os.path.split(folder)[-1])
futures = tqdm.tqdm(futures, total=len(files), desc=desc, unit='image')
vectors = [f for f in futures if f is not None and len(f) > 0]
max_shape = np.array([v.shape[0] for v in vectors]).max()
vectors = [v for v in vectors if v.shape[0] == max_shape]
# return np.array(vectors) # Can't do np.float32, because all images may not have same number of features
return np.float32(vectors)
def _worker(self, *arg, **kwargs):
try:
return self.fisher_vector_of_file(*arg, **kwargs)
except Exception as e:
# import pdb; pdb.post_mortem()
return None
def fisher_vector_of_file(self, filename):
"""
:param str filename: Name of the file
:return: fisher vector of given file
:rtype: np.array
"""
def section_fisher(img_section, full_fisher):
sec_fisher = self.fisher_vector_of_image(img_section)
if sec_fisher is None:
sec_fisher = np.zeros(full_fisher.shape)
return sec_fisher
import cv2
img = cv2.imread(filename)
full_fisher = self.fisher_vector_of_image(img)
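        # Simple spatial layout: the image is split into horizontal thirds and each
        # third's Fisher vector is concatenated with the full-image vector. Sections
        # that yield no descriptors fall back to a zero vector of the same shape.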
x, _, _ = img.shape
loc_mid = int(x / 3)
loc_bottom = int(2 * x / 3)
top_fisher = section_fisher(img[0:loc_mid], full_fisher)
middle_fisher = section_fisher(img[loc_mid + 1:loc_bottom], full_fisher)
bottom_fisher = section_fisher(img[loc_bottom + 1:x], full_fisher)
return np.concatenate((full_fisher, top_fisher, middle_fisher, bottom_fisher))
def fisher_vector_of_image(self, img):
"""
:param np.array img: Img Array generated by cv2.imread
:return: fisher vector of given img array
:rtype: np.array
"""
from aesthetics.fisher import Descriptors
descriptors = Descriptors()
img_descriptors = descriptors.image(img)
if img_descriptors is not None:
return self._fisher_vector(img_descriptors)
else:
return np.empty(0)
def _fisher_vector(self, img_descriptors):
"""
:param img_descriptors: X
:return: fisher vector
:rtype: np.array
"""
means, covariances, weights = self.gmm.means, self.gmm.covariances, self.gmm.weights
s0, s1, s2 = self._likelihood_statistics(img_descriptors)
T = img_descriptors.shape[0]
diagonal_covariances = np.float32([np.diagonal(covariances[k]) for k in range(0, covariances.shape[0])])
""" Refer page 4, first column of reference [1] """
g_weights = self._fisher_vector_weights(s0, s1, s2, means, diagonal_covariances, weights, T)
g_means = self._fisher_vector_means(s0, s1, s2, means, diagonal_covariances, weights, T)
g_sigma = self._fisher_vector_sigma(s0, s1, s2, means, diagonal_covariances, weights, T)
fv = np.concatenate([np.concatenate(g_weights), np.concatenate(g_means), np.concatenate(g_sigma)])
fv = self.normalize(fv)
return fv
def _likelihood_statistics(self, img_descriptors):
"""
:param img_descriptors: X
:return: 0th order, 1st order, 2nd order statistics
as described by equation 20, 21, 22 in reference [1]
"""
def likelihood_moment(x, posterior_probability, moment):
x_moment = np.power(np.float32(x), moment) if moment > 0 else np.float32([1])
return x_moment * posterior_probability
def zeros(like):
return np.zeros(like.shape).tolist()
means, covariances, weights = self.gmm.means, self.gmm.covariances, self.gmm.weights
normals = [multivariate_normal(mean=means[k], cov=covariances[k]) for k in range(0, len(weights))]
""" Gaussian Normals """
gaussian_pdfs = [np.array([g_k.pdf(sample) for g_k in normals]) for sample in img_descriptors]
""" u(x) for equation 15, page 4 in reference 1 """
statistics_0_order, statistics_1_order, statistics_2_order = zeros(weights), zeros(weights), zeros(weights)
for k in range(0, len(weights)):
for index, sample in enumerate(img_descriptors):
posterior_probability = FisherVector.posterior_probability(gaussian_pdfs[index], weights)
statistics_0_order[k] = statistics_0_order[k] + likelihood_moment(sample, posterior_probability[k], 0)
statistics_1_order[k] = statistics_1_order[k] + likelihood_moment(sample, posterior_probability[k], 1)
statistics_2_order[k] = statistics_2_order[k] + likelihood_moment(sample, posterior_probability[k], 2)
return np.array(statistics_0_order), np.array(statistics_1_order), np.array(statistics_2_order)
@staticmethod
def posterior_probability(u_gaussian, weights):
""" Implementation of equation 15, page 4 from reference [1] """
probabilities = np.multiply(u_gaussian, weights)
probabilities = probabilities / np.sum(probabilities)
return probabilities
@staticmethod
def _fisher_vector_weights(statistics_0_order, s1, s2, means, covariances, w, T):
""" Implementation of equation 31, page 6 from reference [1] """
return np.float32([((statistics_0_order[k] - T * w[k]) / np.sqrt(w[k])) for k in range(0, len(w))])
@staticmethod
def _fisher_vector_means(s0, statistics_1_order, s2, means, sigma, w, T):
""" Implementation of equation 32, page 6 from reference [1] """
return np.float32([(statistics_1_order[k] - means[k] * s0[k]) /
(np.sqrt(w[k] * sigma[k])) for k in range(0, len(w))])
@staticmethod
def _fisher_vector_sigma(s0, s1, statistics_2_order, means, sigma, w, T):
""" Implementation of equation 33, page 6 from reference [1] """
return np.float32([(statistics_2_order[k] - 2 * means[k] * s1[k] + (means[k] * means[k] - sigma[k]) * s0[k]) /
(np.sqrt(2 * w[k]) * sigma[k]) for k in range(0, len(w))])
@staticmethod
def normalize(fisher_vector):
""" Power normalization based on equation 30, page 5, last para; and
is used in step 3, algorithm 1, page 6 of reference [1] """
v = np.sign(fisher_vector) * np.sqrt(abs(fisher_vector)) # Power normalization
return v / np.sqrt(np.dot(v, v)) # L2 Normalization | Aesthetics | /Aesthetics-0.1.1.tar.gz/Aesthetics-0.1.1/aesthetics/fisher/fisher.py | fisher.py |
import glob
import cv2 # v3.2.0
import numpy as np
class Gmm(object):
""" K-component Gaussian Mixture Model """
def __init__(self, K):
"""
As described in section 2.2, para 3 of reference [1]
:param int K: Number of components
"""
self.K = K
self.means = None
""" Mean Vector """
self.covariances = None
""" Covariance Matrix """
self.weights = None
""" Mixture Weights """
def generate(self, input_folder, limit):
"""
Reads all the images files from the sub-folders of the 'input_folder'
and creates a gmm definition (mean, covariances, weights) from those
image's features.
:param str input_folder: Folder whose subfolder contains images
:param int limit: Number of images to be read
:return: tuple(mean, covariance, weights)
:rtype: tuple
"""
from aesthetics.fisher import Descriptors
descriptor = Descriptors()
img_descriptors = [descriptor.folder(folder, limit) for folder in sorted(glob.glob(input_folder + '/*'))]
max_shape = np.array([v.shape[0] for v in img_descriptors]).max()
img_descriptors = list(filter(lambda x: x is not None and x.shape[0] == max_shape, img_descriptors))
words = np.concatenate(img_descriptors)
print("Training GMM of size", self.K)
self.means, self.covariances, self.weights = self.train_expectation_maximisation(words, self.K)
# Throw away gaussians with weights that are too small:
self.means = self._remove_too_small(self.means, self.weights)
self.covariances = self._remove_too_small(self.covariances, self.weights)
self.weights = self._remove_too_small(self.weights, self.weights)
self.save()
return self.means, self.covariances, self.weights
def _remove_too_small(self, values, weights):
threshold = 1.0 / self.K
return np.float32([m for k, m in zip(range(0, len(weights)), values) if weights[k] > threshold])
def load(self, folder=''):
"""
Load gmm definition from self.save'd folder data
:param str folder: Folder name
:return: nothing
"""
import os
files = ['means.gmm.npy', 'covariances.gmm.npy', 'weights.gmm.npy']
self.means, self.covariances, self.weights = map(lambda file: np.load(file),
map(lambda s: os.path.join(folder, s), files))
def save(self):
"""
Save gmm definition to current folder
:return: nothing
"""
np.save("means.gmm", self.means)
np.save("covariances.gmm", self.covariances)
np.save("weights.gmm", self.weights)
@staticmethod
def train_expectation_maximisation(descriptors, K):
""" See reference [2] """
em = cv2.ml.EM_create()
em.setClustersNumber(K)
em.trainEM(descriptors)
return np.float32(em.getMeans()), np.float32(em.getCovs()), np.float32(em.getWeights())[0] | Aesthetics | /Aesthetics-0.1.1.tar.gz/Aesthetics-0.1.1/aesthetics/fisher/gmm.py | gmm.py |
import glob
from concurrent.futures import ProcessPoolExecutor
import cv2 # v3.2.0
import numpy as np
import tqdm
class Descriptors(object):
"""
Convert image to features
Key Methods:
* image(self, img): Given a img array, returns its descriptors
* image_file(self, filename): Given a image filename, returns its descriptors
"""
def __init__(self):
self.feature_transform = None
def folder(self, folder, limit):
"""
:param folder: Name of the folder containing images
:type folder: str
:param limit: Number of images to be read from given folder
:type limit: int
:return: List of descriptors of the given images
:rtype: np.array
"""
files = glob.glob(folder + "/*.jpg")[:limit]
with ProcessPoolExecutor() as executor:
futures = executor.map(self.image_file, files)
futures = tqdm.tqdm(futures, total=len(files), desc='Calculating descriptors')
descriptors = [f for f in futures]
# descriptors = [self.image_file(file) for file in files]
descriptors = list(filter(lambda x: x is not None, descriptors))
return np.concatenate(descriptors)
def image_file(self, filename):
"""
Refer section 2.2 of reference [1]
:param filename: Name of the image to be read
:type filename: str
:return: Descriptors of the given image
:rtype: np.array
"""
img = cv2.imread(filename, 0)
return self.image(img)
def image(self, img):
"""
:param img: Image array read using cv2.imread
:type img: np.array
:return: Descriptors of the given image
:rtype: np.array
"""
# img = cv2.resize(img, (256, 256))
if self.feature_transform is None:
self.feature_transform = cv2.xfeatures2d.SIFT_create()
# self.feature_transform = cv2.ORB_create()
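            # Note: cv2.xfeatures2d.SIFT_create() needs an OpenCV build that includes the
            # contrib modules; the ORB detector above is a drop-in alternative if SIFT is unavailable.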
_, descriptors = self.feature_transform.detectAndCompute(img, None)
return descriptors | Aesthetics | /Aesthetics-0.1.1.tar.gz/Aesthetics-0.1.1/aesthetics/fisher/descriptors.py | descriptors.py |
import logging
import click
import numpy as np
import pandas as pd
from sklearn import svm
from sklearn import ensemble
def ordered_dict_to_x_y(features):
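    # Stack the per-folder feature matrices into X and label each row with the index
    # of its folder in the OrderedDict, so the folder order defines the class ids.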
logging.info('Key ordering: %s', list(features.keys()))
feature_values = list(features.values())
X = np.concatenate(feature_values)
Y = np.concatenate([np.float32([i] * len(v)) for i, v in zip(range(0, len(features)), feature_values)])
return X, Y
def train(features):
X, Y = ordered_dict_to_x_y(features)
pd.DataFrame(X).to_csv('features.csv')
clf = get_classification()
clf.fit(X, Y)
pd.to_pickle(clf, 'classification.pkl')
return clf
def get_classification():
    # GradientBoostingClassifier is used by default; swap in svm.SVC() to try an SVM instead.
    # clf = svm.SVC()
    clf = ensemble.GradientBoostingClassifier()
    return clf
def success_rate(classifier, features):
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_score
logging.info('Applying the classifier...')
feature_values = list(features.values())
X = np.concatenate(feature_values)
Y = np.concatenate([np.float32([i] * len(v)) for i, v in zip(range(0, len(feature_values)), feature_values)])
y_pred = classifier.predict(X)
# logging.debug('Predictions:\n%s', list(zip(Y, y_pred)))
logging.info('Confusion Matrix:\n%s', confusion_matrix(y_true=Y, y_pred=y_pred))
report = classification_report(y_true=Y, y_pred=y_pred, target_names=list(features.keys()))
logging.info('Classification Report:\n%s', report)
return precision_score(y_true=Y, y_pred=y_pred)
def predict_from_url(url, *args, **kwargs):
import os
os.system('wget {}'.format(url))
image_path=os.path.split(url)[-1]
import cv2
img = cv2.imread(image_path)
cv2.imwrite(image_path, cv2.resize(img, (500, 500)))
return predict_image(image_path=image_path, *args, **kwargs)
def predict_image(classifier, gmm, image_path):
from aesthetics.fisher import FisherVector
fv = FisherVector(gmm)
vector = fv.fisher_vector_of_file(image_path)
    # scikit-learn classifiers expect a 2D array of samples, so wrap the single vector
    return classifier.predict(vector.reshape(1, -1))
@click.command()
@click.option('-d', '--dir', default='.', help='Directory of images (default: ./)')
@click.option('-g', '--load-gmm', default=False, is_flag=True, help='Load gmm dictionary from pickles')
@click.option('-n', '--number', default=5, help='Number of words in gmm dictionary')
@click.option('-l', '--limit', default=50, help='Number of images to read')
@click.option('-v', '--validation-dir', default=None, help='Directory of images (default: None)')
def main(dir, load_gmm, number, limit, validation_dir):
"""
* Create a GMM using the training images.
* Use this GMM to create feature vectors of training images.
    * Train a classifier (gradient boosting by default) on the training feature vectors.
    * Report prediction accuracy on the training images (and, optionally, a validation set).
"""
from aesthetics.fisher import Gmm
from aesthetics.fisher import FisherVector
logging.debug('dir=%s, load_gmm=%s, number=%s, limit=%s, val_dir=%s', dir, load_gmm, number, limit, validation_dir)
gmm = Gmm(K=number)
if load_gmm:
gmm.load()
else:
gmm.generate(input_folder=dir, limit=limit)
fisher_vector = FisherVector(gmm)
features = fisher_vector.features(dir, limit)
classifier = train(features)
rate = success_rate(classifier, features)
logging.info("Self test success rate is %.2f", rate)
if validation_dir is not None:
validation_features = fisher_vector.features(validation_dir, limit)
rate = success_rate(classifier, validation_features)
logging.info("Validation test success rate is %.2f", rate)
return 0
if __name__ == '__main__':
import sys
logging.basicConfig(level=logging.DEBUG)
sys.exit(main()) | Aesthetics | /Aesthetics-0.1.1.tar.gz/Aesthetics-0.1.1/aesthetics/fisher/cli.py | cli.py |
# AezaAPI
### Description
A library for working with the Aeza service API
### Methods
- <b>get_balance</b> - Check the balance
- <b>invoice_card</b> - Top up the balance by card
- <b>invoice_qiwi</b> - Top up the balance via Qiwi
- <b>status_server</b> - Server status
- <b>add_api_key</b> - Create an API key
- <b>get_api_key</b> - Get the list of API keys
- <b>add_ssh_key</b> - Add an SSH key
- <b>get_ssh_key</b> - Get the list of SSH keys
- <b>get_my_server</b> - Get information about purchased servers
- <b>get_product</b> - Get the list of products
- <b>get_total_product</b> - Number of products
- <b>ordering_service</b> - Purchase a server
### Examples
```python
from ApiAeza import aeza
TOKEN = aeza.AuthAeza('API-KEY')
def test() -> str:
    """Check the balance; if it is below 50 roubles,
    create an invoice for 500 roubles.
    The method immediately returns a payment link."""
    if TOKEN.get_balance() < 50:
        return TOKEN.invoice_card(500)
    return 'All good'
```
<b>Purchasing a server</b>
```python
from ApiAeza import aeza
TOKEN = aeza.AuthAeza('API-KEY')
def test() -> str:
    """Purchase a server."""
    return TOKEN.ordering_service(1,  # Quantity
                                  'mount',  # Term (hour, mount, quarter_year, year, half_year)
                                  'NameServer',  # Server name
                                  3,  # Server ID (can be looked up with get_product)
                                  25,  # os
                                  True)  # Auto-renewal
```
### Getting started
To get started, install the library and then import it
```
pip install ApiAeza
```
```python
from ApiAeza import aeza
```
Next, initialise your API key
```python
TOKEN = aeza.AuthAeza('API-KEY')
```
All methods are called on the variable in which you initialised the token
```python
TOKEN.get_my_server()
```
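<b>Checking the balance and server status</b>
An extra illustrative example (the API key is a placeholder); `status_server` reads uptime percentages from Aeza's public status page:
```python
from ApiAeza import aeza
TOKEN = aeza.AuthAeza('API-KEY')
print(TOKEN.get_balance())    # current balance as a float
print(TOKEN.status_server())  # dict of uptime percentages keyed by location index
```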
import requests
import json
class AuthAeza():
def __init__(self, key: str) -> None:
self.key = key
def token(self):
"""Инициализация токена."""
return self.key
def get_balance(self) -> float:
"""Получение баланса."""
responce = requests.get('https://core.aeza.net/api/desktop?',
headers={
'authorization': f'Bearer {self.key}'
}
)
balance: float = json.loads(
responce.text)['data']['balance']['value']
return balance
def invoice_card(self, amount: int) -> str:
"""Пополнение счёта картой."""
if amount < 100:
return 'Минимальная сумма пополнения 100 ₽'
responce = requests.post('https://core.aeza.net/api/payment/invoices?',
headers={
'authorization': f'Bearer {self.key}'
},
json={
"method": "unitpay:card", "amount": amount
}
)
link: str = json.loads(responce.text)[
'data']['transaction']['invoice']['link']
return link
def invoice_qiwi(self, amount: int) -> str:
"""Пополнение счёта через Qiwi."""
if amount < 100:
return 'Минимальная сумма пополнения 100 ₽'
responce = requests.post('https://core.aeza.net/api/payment/invoices?',
headers={
'authorization': f'Bearer {self.key}'
},
json={
"method": "lava:qiwi", "amount": amount
}
)
link: str = json.loads(responce.text)[
'data']['transaction']['invoice']['link']
return link
def status_server(self) -> dict:
"""Проверка статуса серверов."""
server: dict = {}
num: int = 0
responce = requests.get(
'https://aeza-monitor.cofob.dev/api/status-page/heartbeat/locations').text
status: float = json.loads(responce)['uptimeList']
for i in status:
percent = status[i]
server[num] = "{:.2f}".format(float(percent))
num += 1
return server
def add_api_key(self, name_key: str) -> str:
"""Создание API-ключа."""
responce = requests.post('https://core.aeza.net/api/apikeys?',
headers={
'authorization': f'Bearer {self.key}'
},
json={
'name': name_key
}
)
apikey: str = json.loads(responce.text)['data']['items'][0]['token']
return apikey
def get_api_key(self) -> dict:
"""Получение списка API-ключей"""
apikey: dict = {}
responce = requests.get('https://core.aeza.net/api/apikeys?',
headers={
'authorization': f'Bearer {self.key}'
}
)
apikeys: dict = json.loads(responce.text)['data']['items']
for i in apikeys:
apikey[i['name']] = i['token']
return apikey
def add_ssh_key(self, name, ssh_key) -> str:
"""Добавление SSH-ключа"""
responce = requests.post('https://core.aeza.net/api/sshkeys?',
headers={
'authorization': f'Bearer {self.key}'
},
json={
'name': name, 'pubKey': ssh_key
}
)
status = json.loads(responce.text)
try:
return status['error']['message']
except:
            return 'Key added successfully'
def get_ssh_key(self):
"""Получение списка SSH-ключей."""
sshkey: dict = {}
responce = requests.get('https://core.aeza.net/api/sshkeys?',
headers={
'authorization': f'Bearer {self.key}'
}
)
sshkeys: dict = json.loads(responce.text)['data']['items']
for i in sshkeys:
sshkey[i['name']] = i['pubKey']
return sshkey
def get_my_server(self):
"""Получение информации приобретённых серверов."""
all_server = {}
responce = requests.get(
'https://core.aeza.net/api/services?offset=NaN&count=undefined&sort=',
headers={
'authorization': f'Bearer {self.key}'
}
)
servers: dict = json.loads(responce.text)['data']['items']
for i in servers:
try:
all_server[i['name']] = {'status': i['status'],
'ip': i['ip'],
'username': i['parameters']['username'],
'password': i['secureParameters']['data']['password'],
'type': i['product']['type'],
'id': i['id']
}
except:
...
return all_server
def get_product(self) -> dict:
"""Получение списка продуктов."""
product: dict = {}
responce = requests.get(
'https://core.aeza.net/api/services/products',
headers={
'Authorization': f'Bearer {self.key}'
}
)
products = json.loads(responce.text)
for i in products['data']['items']:
product[i['name']] = [
{'configuration': i['configuration']},
{'price': i['rawPrices']}
]
return product
def get_total_product(self) -> int:
"""Получение количества продуктов."""
responce = requests.get(
'https://core.aeza.net/api/services/products',
headers={
'Authorization': f'Bearer {self.key}'})
total = json.loads(responce.text)
return total['data']['total']
def ordering_service(self, count: int,
term: str,
name: str,
id: int,
parameters: int,
autoProlong: bool) -> bool:
"""Функция оформления сервера.
Аргументы:
count -- количество серверов (целое число)
term -- срок аренды сервера (строка)
name -- название сервера (строка)
id -- идентификатор продукта (целое число)
parameters -- параметры сервера (целое число)
autoProlong -- автоматическое продление аренды (булево значение)
Возвращает:
Возвращает строку 'Сервер оформлен', если сервер успешно оформлен,
или сообщение об ошибке в виде строки, если произошла ошибка.
"""
data = {
'count': count,
'term': term,
'name': name,
'productId': id,
'parameters': {
'os': parameters
},
'autoProlong': autoProlong,
'method': 'balance'
}
responce = requests.post(
'https://core.aeza.net/api/services/orders',
headers={
'Authorization': f'Bearer {self.key}'},
json=data
)
status = json.loads(responce.text)
try:
return status['error']['message']
except:
            return 'Server ordered'
Copyright (c) 2018 The Python Packaging Authority
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. | AffilateMarketingHelp | /AffilateMarketingHelp-0.0.1-py3-none-any.whl/AffilateMarketingHelp-0.0.1.dist-info/LICENSE.md | LICENSE.md |
# Affirmations
A module that gives you a little bump of encouragement.
## Requirements
- A positive attitude
- A can-do spirit
## Installation
```bash
pip install Affirmations
```
## Usage
Decorate any function to get a random affirmation printed to stdout every time that function is run
```python
from Affirmations import affirm
@affirm() # prints an affirmation to stdout 100% of the time this function is run
def hello_world():
print("hello")
@affirm(0.2) # prints an affirmation to stdout 20% of the time this function is run
def hello_world2():
print("hello")
hello_world()
```
```bash
hello
You are awesome!
``` | Affirmations | /Affirmations-0.0.10.tar.gz/Affirmations-0.0.10/README.md | README.md |
from rpy2.robjects import r, pandas2ri
from rpy2.robjects.packages import importr
import rpy2.robjects as robjects
import os
# Loading to MySQL
import sqlalchemy
from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
from sqlalchemy.types import VARCHAR
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import pymysql
import pandas as pd
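# ETL flow: extract() runs RMA normalisation on Affymetrix .CEL files through R's affy
# package (via rpy2), transform() turns the expression matrix into a pandas DataFrame
# with an ID column of probe names, and loading() writes that table to MySQL.
# Example usage (directory, credentials and table name are placeholders):
#   df, n = extract("CEL_dir", cdf="hgu133plus2cdf")
#   table = transform(df, n)
#   loading("user", "passwd", "localhost", "mydb", "expression", table)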
def extract(dir, cdf = "hgu133plus2cdf"):
affy = importr('affy')
annotation = importr(cdf)
print("Annotation is using {}".format(annotation))
os.chdir(dir)
datapath = os.getcwd()
datalist = [i for i in os.listdir(datapath) if i.endswith(".CEL")]
print("Catch CEL data {}".format(datalist))
cdatalist = robjects.r['as.character'](datalist)
rawdata = affy.ReadAffy(filenames=cdatalist)
rmdata = affy.rma(rawdata)
data = robjects.r['exprs'](rmdata)
df = robjects.r['data.frame'](data)
rID_list = robjects.r['rownames'](df)
df_len = len(rID_list)
print("This Platform have {} probes".format(df_len))
return df, df_len
def transform(df, df_len):
pandas2ri.activate()
r.data('df')
pandas_df = df.head(df_len)
col_name = pandas_df.columns.to_list()
col_name.insert(0, "ID")
Inputid = pandas_df.index
pandas_df.reindex(columns=col_name)
pandas_df["ID"] = Inputid
pandas_df = pandas_df.reindex(columns=col_name)
return pandas_df
def loading(user, passwd, address, dbname, tablename, data):
try:
db_connect = "mysql+pymysql://{}:{}@{}/{}".format(user, passwd, address, dbname)
engine = sqlalchemy.create_engine(db_connect, encoding='utf8', echo=True)
con = engine.connect()
pandas_df = data
pandas_df.to_sql(tablename, engine, schema=dbname, if_exists='append', index=False)
# Alter ID type
DB_Session = sessionmaker(bind=engine)
session = DB_Session()
sql_alter_idtype = "ALTER TABLE {} COLUMN ID varchar(128);".format(tablename)
session.execute(sql_alter_idtype)
# Alter ID to PK
sql_alter_idkey = "ALTER TABLE {} ADD PRIMARY KEY (ID);".format(tablename)
session.execute(sql_alter_idkey)
# session.execute("ALTER TABLE {} DROP PRIMARY KEY;".format(tablename))
con.close()
return "Loading Success"
except:
return "Loading Failed" | Affy-ETL | /Affy-ETL-0.0.6.tar.gz/Affy-ETL-0.0.6/Affy_ETL/AETL.py | AETL.py |
import json
import requests # urllib please - we're done
class AfricasTalkingGatewayException(Exception):
pass
class AfricasTalkingGateway:
def __init__(self, username, apiKey, environment = 'production'):
self.username = username
self.apiKey = apiKey
self.environment = environment
self.HTTP_RESPONSE_OK = 200
self.HTTP_RESPONSE_CREATED = 201
# Turn this on if you run into problems. It will print the raw HTTP response from our server
self.Debug = False
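    # Minimal usage sketch (the credentials and phone number below are placeholders):
    #   gateway = AfricasTalkingGateway('myUsername', 'myAPIKey', environment='sandbox')
    #   recipients = gateway.sendMessage('+254711XXXYYY', 'Hello world')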
# Messaging methods
def sendMessage(self, to_, message_, from_ = None, bulkSMSMode_ = 1, enqueue_ = 0, keyword_ = None, linkId_ = None, retryDurationInHours_ = None):
if len(to_) == 0 or len(message_) == 0:
raise AfricasTalkingGatewayException("Please provide both to_ and message_ parameters")
parameters = {'username' : self.username,
'to': to_,
'message': message_,
'bulkSMSMode':bulkSMSMode_}
if not from_ is None :
parameters["from"] = from_
if enqueue_ > 0:
parameters["enqueue"] = enqueue_
if not keyword_ is None:
parameters["keyword"] = keyword_
if not linkId_ is None:
parameters["linkId"] = linkId_
if not retryDurationInHours_ is None:
parameters["retryDurationInHours"] = retryDurationInHours_
response = self.sendRequest(self.getSmsUrl(), parameters)
if self.responseCode == self.HTTP_RESPONSE_CREATED:
decoded = json.loads(response)
recipients = decoded['SMSMessageData']['Recipients']
if len(recipients) > 0:
return recipients
raise AfricasTalkingGatewayException(decoded['SMSMessageData']['Message'])
raise AfricasTalkingGatewayException(response)
def fetchMessages(self, lastReceivedId_ = 0):
url = "%s?username=%s&lastReceivedId=%s" % (self.getSmsUrl(), self.username, lastReceivedId_)
response = self.sendRequest(url)
if self.responseCode == self.HTTP_RESPONSE_OK:
decoded = json.loads(response)
return decoded['SMSMessageData']['Messages']
raise AfricasTalkingGatewayException(response)
# Subscription methods
def createSubscription(self, phoneNumber_, shortCode_, keyword_):
if len(phoneNumber_) == 0 or len(shortCode_) == 0 or len(keyword_) == 0:
raise AfricasTalkingGatewayException("Please supply phone number, short code and keyword")
url = "%s/create" %(self.getSmsSubscriptionUrl())
parameters = {
'username' :self.username,
'phoneNumber' :phoneNumber_,
'shortCode' :shortCode_,
'keyword' :keyword_
}
response = self.sendRequest (url, parameters)
if self.responseCode == self.HTTP_RESPONSE_CREATED:
decoded = json.loads(response)
return decoded
raise AfricasTalkingGatewayException(response)
def deleteSubscription(self, phoneNumber_, shortCode_, keyword_):
if len(phoneNumber_) == 0 or len(shortCode_) == 0 or len(keyword_) == 0:
raise AfricasTalkingGatewayException("Please supply phone number, short code and keyword")
url = "%s/delete" %(self.getSmsSubscriptionUrl())
parameters = {
'username' :self.username,
'phoneNumber' :phoneNumber_,
'shortCode' :shortCode_,
'keyword' :keyword_
}
response = self.sendRequest(url, parameters)
if self.responseCode == self.HTTP_RESPONSE_CREATED:
decoded = json.loads(response)
return decoded
raise AfricasTalkingGatewayException(response)
def fetchPremiumSubscriptions(self,shortCode_, keyword_, lastReceivedId_ = 0):
if len(shortCode_) == 0 or len(keyword_) == 0:
raise AfricasTalkingGatewayException("Please supply the short code and keyword")
url = "%s?username=%s&shortCode=%s&keyword=%s&lastReceivedId=%s" % (self.getSmsSubscriptionUrl(),
self.username,
shortCode_,
keyword_,
lastReceivedId_)
result = self.sendRequest(url)
if self.responseCode == self.HTTP_RESPONSE_OK:
decoded = json.loads(result)
return decoded['responses']
        raise AfricasTalkingGatewayException(result)
# Voice methods
def call(self, from_, to_):
parameters = {
'username' : self.username,
'from' : from_,
'to': to_
}
url = "%s/call" %(self.getVoiceUrl())
response = self.sendRequest(url, parameters)
decoded = json.loads(response)
if decoded['errorMessage'] == "None":
return decoded['entries'];
raise AfricasTalkingGatewayException(decoded['errorMessage'])
def getNumQueuedCalls(self, phoneNumber_, queueName_ = None):
parameters = {
'username' :self.username,
'phoneNumbers' :phoneNumber_
}
if queueName_ is not None:
parameters['queueName'] = queueName_
url = "%s/queueStatus" %(self.getVoiceUrl())
response = self.sendRequest(url, parameters)
decoded = json.loads(response)
if decoded['errorMessage'] == "None":
return decoded['entries']
raise AfricasTalkingGatewayException(decoded['errorMessage'])
def uploadMediaFile(self, urlString_):
parameters = {
'username' :self.username,
'url' :urlString_
}
url = "%s/mediaUpload" %(self.getVoiceUrl())
response = self.sendRequest(url, parameters)
decoded = json.loads(response)
if decoded['errorMessage'] != "None":
raise AfricasTalkingGatewayException(decoded['errorMessage'])
#Airtime method
def sendAirtime(self, recipients_):
parameters = {
'username' : self.username,
'recipients' : json.dumps(recipients_)
}
url = "%s/send" %(self.getAirtimeUrl())
response = self.sendRequest(url, parameters)
decoded = json.loads(response)
responses = decoded['responses']
if self.responseCode == self.HTTP_RESPONSE_CREATED:
if len(responses) > 0:
return responses
raise AfricasTalkingGatewayException(decoded["errorMessage"])
raise AfricasTalkingGatewayException(response)
#Payment Methods
def initiateMobilePaymentCheckout(self,
productName_,
phoneNumber_,
currencyCode_,
providerChannel_,
amount_,
metadata_):
parameters = {
'username' : self.username,
'productName' : productName_,
'phoneNumber' : phoneNumber_,
'currencyCode' : currencyCode_,
'providerChannel' : providerChannel_,
'amount' : amount_,
'metadata' : metadata_
}
url = self.getMobilePaymentCheckoutUrl()
response = self.sendJSONRequest(url, json.dumps(parameters))
if self.responseCode == self.HTTP_RESPONSE_CREATED:
decoded = json.loads(response)
if decoded['status'] == 'PendingConfirmation':
return decoded['transactionId']
raise AfricasTalkingGatewayException(decoded['description'])
raise AfricasTalkingGatewayException(response)
def mobilePaymentB2CRequest(self, productName_, recipients_):
parameters = {
'username' : self.username,
'productName' : productName_,
'recipients' : recipients_
}
url = self.getMobilePaymentB2CUrl()
response = self.sendJSONRequest(url, json.dumps(parameters))
if self.responseCode == self.HTTP_RESPONSE_CREATED:
decoded = json.loads(response)
if len(decoded['entries']) > 0:
return decoded['entries']
raise AfricasTalkingGatewayException(decoded['errorMessage'])
raise AfricasTalkingGatewayException(response)
def mobilePaymentB2BRequest(self, productName_, providerData_, currencyCode_, amount_, metadata_):
if "provider" not in providerData_:
raise AfricasTalkingGatewayException("Missing field provider")
if "destinationChannel" not in providerData_:
raise AfricasTalkingGatewayException("Missing field destinationChannel")
if "destinationAccount" not in providerData_:
raise AfricasTalkingGatewayException("Missing field destinationAccount")
if "transferType" not in providerData_:
raise AfricasTalkingGatewayException("Missing field transferType")
parameters = {
'username' : self.username,
'productName' : productName_,
'provider' : providerData_['provider'],
'destinationChannel' : providerData_['destinationChannel'],
'destinationAccount': providerData_['destinationAccount'],
'transferType' : providerData_['transferType'],
'currencyCode' : currencyCode_,
'amount' : amount_,
'metadata' : metadata_
}
url = self.getMobilePaymentB2BUrl()
response = self.sendJSONRequest(url, json.dumps(parameters))
if self.responseCode == self.HTTP_RESPONSE_CREATED:
decoded = json.loads(response)
return decoded
raise AfricasTalkingGatewayException(response)
# Userdata method
def getUserData(self):
url = "%s?username=%s" %(self.getUserDataUrl(), self.username)
result = self.sendRequest(url, getUserData=True)
if self.responseCode == self.HTTP_RESPONSE_OK:
decoded = json.loads(result)
return decoded['UserData']
        raise AfricasTalkingGatewayException(result)
# HTTP access method
def sendRequest(self, urlString, data_ = None, getUserData = None):
try:
headers = { 'Accept' : 'application/json',
'apikey' : self.apiKey }
if getUserData is not None:
resp = requests.get(urlString, data=data_, headers=headers)
elif data_ is not None:
resp = requests.post(urlString, data=data_, headers=headers)
else:
resp = requests.post(urlString, headers=headers)
        except requests.exceptions.Timeout as e:
            raise AfricasTalkingGatewayException(str(e))
        except requests.exceptions.RequestException as e:
            raise AfricasTalkingGatewayException(str(e))
else:
self.responseCode = resp.status_code
response = resp.text
if self.Debug:
print("Raw response: " + response)
return response
def sendJSONRequest(self, urlString, data_):
try:
headers = { 'Accept' : 'application/json',
'Content-Type' : 'application/json',
'apikey' : self.apiKey }
resp = requests.post(urlString, data = data_, headers = headers)
        except requests.exceptions.Timeout as e:
            raise AfricasTalkingGatewayException(str(e))
        except requests.exceptions.RequestException as e:
            raise AfricasTalkingGatewayException(str(e))
else:
self.responseCode = resp.status_code
response = resp.text
if self.Debug:
print("Raw response: " + response)
return response
def getApiHost(self):
if self.environment == 'sandbox':
return 'https://api.sandbox.africastalking.com'
else:
return 'https://api.africastalking.com'
def getVoiceHost(self):
if self.environment == 'sandbox':
return 'https://voice.sandbox.africastalking.com'
else:
return 'https://voice.africastalking.com'
def getPaymentHost(self):
if self.environment == 'sandbox':
return 'https://payments.sandbox.africastalking.com'
else:
return 'https://payments.africastalking.com'
def getSmsUrl(self):
return self.getApiHost() + "/version1/messaging"
def getVoiceUrl(self):
return self.getVoiceHost()
def getSmsSubscriptionUrl(self):
return self.getApiHost() + "/version1/subscription"
def getUserDataUrl(self):
return self.getApiHost() + "/version1/user"
def getAirtimeUrl(self):
return self.getApiHost() + "/version1/airtime"
def getMobilePaymentCheckoutUrl(self):
return self.getPaymentHost() + "/mobile/checkout/request"
def getMobilePaymentB2CUrl(self):
return self.getPaymentHost() + "/mobile/b2c/request"
def getMobilePaymentB2BUrl(self):
return self.getPaymentHost() + "/mobile/b2b/request" | AfricastalkingGateway | /AfricastalkingGateway-2.0.tar.gz/AfricastalkingGateway-2.0/africastalking/AfricasTalkingGateway.py | AfricasTalkingGateway.py |
from joeynmt.helpers import load_config, load_checkpoint
from joeynmt.vocabulary import Vocabulary
from joeynmt.model import build_model
from joeynmt.batch import Batch
from joeynmt.data import MonoDataset
from torchtext.legacy.data import Field # pylint: disable=no-name-in-module
from joeynmt.constants import UNK_TOKEN, PAD_TOKEN, EOS_TOKEN
from joeynmt.prediction import parse_test_args, validate_on_data
from typing import Dict
import os
import torch
from google_drive_downloader import GoogleDriveDownloader as gdd
import pandas as pd
import pkg_resources
import warnings
from spacy.lang.en import English # updated
class MasakhaneTranslate:
def __init__(self, model_path:str=None, model_name:str=None, version:str=None, device: str = 'cpu'):
"""
model_path: the directory containing your checkpoint and configuration file
model_name: name of the model. See the list of the models (directory names) from https://github.com/masakhane-io/masakhane-mt/tree/master/benchmarks
version: most of the models have several versions. Each version is provided as a subdirectory of the model's directory from https://github.com/masakhane-io/masakhane-mt/tree/master/benchmarks
device: device to use for inference ("cpu" or "cuda")
"""
self.model, self.cfg, self.src_vocab, self.use_cuda = self.load_model(model_path=model_path, model_name=model_name, version=version, device=device)
"""
cfg: configuration dictionary
batch_class: class type of batch
"""
def download_model(self, model_name:str):
print("Downloading", model_name, "...")
print(os.getcwd())
links_models_path = pkg_resources.resource_filename('afrotranslate', 'links_models.csv')
df = pd.read_csv(links_models_path)
try:
link = df.loc[df.model_name==model_name, "link"].values[0]
except:
raise ValueError("Model does not exist. Please select between the following list:", list(df.model_name))
id = link.split('/')[-2]
#dest_path = pkg_resources.resource_filename(f'afrotranslate.models.{model_name}', f'{model_name}.zip')
dest_dir = pkg_resources.resource_filename(f'afrotranslate', 'models')
os.makedirs(dest_dir+f"/{model_name}")
dest_path = dest_dir+f"/{model_name}/{model_name}.zip"
gdd.download_file_from_google_drive(file_id=id,
dest_path=dest_path,
unzip=True)
os.remove(dest_path)
print(model_name, "downloaded!")
def load_model(self, model_path:str=None, model_name:str=None, version:str=None, device:str="cpu") -> torch.nn.Module:
if model_name:
#Load publicly available Masakhane models
dest_dir = pkg_resources.resource_filename(f'afrotranslate', 'models')
model_dir = dest_dir+f"/{model_name}"
if not os.path.isdir(model_dir):
self.download_model(model_name)
if (version is None)or(version==""): #or(not os.path.isdir(model_dir+"/"+version)):
version = os.listdir(model_dir)[0]
print("As you don't provide any version we use this one by default:", version)
print("Here is the complete list of versions:", os.listdir(model_dir))
if (not version in os.listdir(model_dir)) : #subdir not in directory
first_element_in_dir = os.listdir(model_dir)[0]
if os.path.isdir(model_dir+"/"+first_element_in_dir):
raise ValueError('This version does not exit. Please select between the following list:', os.listdir(model_dir))
else:
warnings.warn("There is only one version for this model!")
if not os.path.isdir(model_dir+"/"+version): #if there is no subdirectory
version=""
model_dir = model_dir+"/"+version
cfg_file = model_dir+"/config.yaml"
ckpt=model_dir+"/model.ckpt"
elif model_path:
if not os.path.exists(model_path):
raise ValueError('Cannot locate model directory', model_path)
if not os.path.isdir(model_path):
raise ValueError('Model path must be a directory', model_path)
model_dir = model_path
cfg_file = model_path+"/config.yaml"
ckpt_candidates = [file for file in os.listdir(model_path) if file.endswith('ckpt')]
if len(ckpt_candidates) == 0:
raise ValueError('No checkpoint file under model directory', model_path)
elif len(ckpt_candidates) > 1:
print("WARNING: More than one checkpoint under model directory. Taking first:", ckpt_candidates[0])
ckpt = os.path.join(model_dir, ckpt_candidates[0])
else:
raise ValueError('Neither model id nor model directory path specified!')
if not os.path.exists(cfg_file):
raise ValueError('Cannot locate config.yaml in model directory', model_dir)
cfg = load_config(cfg_file)
# read vocabs
src_vocab_file = model_dir+ "/" +cfg["data"]["src_vocab"]
trg_vocab_file = model_dir + "/" +cfg["data"]["trg_vocab"]
if not os.path.exists(src_vocab_file):
raise ValueError('Cannot locate vocab file %s in model directory', src_vocab_file)
if not os.path.exists(trg_vocab_file):
raise ValueError('Cannot locate vocab file %s in model directory', trg_vocab_file)
src_vocab = Vocabulary(file=src_vocab_file)
trg_vocab = Vocabulary(file=trg_vocab_file)
if device is None:
use_cuda = torch.cuda.is_available()
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
if device =="cpu":
use_cuda = False
# load model state from disk
model_checkpoint = load_checkpoint(ckpt, use_cuda=use_cuda)
# build model and load parameters into it
model = build_model(cfg["model"], src_vocab=src_vocab, trg_vocab=trg_vocab)
model.load_state_dict(model_checkpoint["model_state"])
model.to(device)
return model, cfg, src_vocab, use_cuda
def translate(self, src_input:str, n_best:int=1) -> str:
"""
Code inspired by "prediction.py" from JoeyNMT
src_input: sentence in the source language
n_best: amount of candidates to display. Limited by the beam size in the configuration file.
"""
def _load_line_as_data(line):
""" Create a dataset from one line via a temporary file. """
# write src input to temporary file
tmp_name = "tmp"
tmp_suffix = ".src"
tmp_filename = tmp_name+tmp_suffix
with open(tmp_filename, "w", encoding="utf-8") as tmp_file:
tmp_file.write("{}\n".format(line))
test_data = MonoDataset(path=tmp_name, ext=tmp_suffix,
field=src_field)
# remove temporary file
if os.path.exists(tmp_filename):
os.remove(tmp_filename)
return test_data
def _translate_data(test_data):
""" Translates given dataset, using parameters from outer scope. """
score, loss, ppl, sources, sources_raw, references, hypotheses, \
hypotheses_raw, attention_scores = validate_on_data(
self.model, data=test_data, batch_size=batch_size,
batch_class=Batch, batch_type=batch_type, level=level,
max_output_length=max_output_length, eval_metric="",
use_cuda=self.use_cuda, compute_loss=False, beam_size=beam_size,
beam_alpha=beam_alpha, postprocess=postprocess,
bpe_type=bpe_type, sacrebleu=sacrebleu, n_gpu=n_gpu, n_best=n_best)
return hypotheses
data_cfg = self.cfg["data"]
level = data_cfg["level"]
lowercase = data_cfg["lowercase"]
tok_fun = lambda s: list(s) if level == "char" else s.split()
src_field = Field(init_token=None, eos_token=EOS_TOKEN, pad_token=PAD_TOKEN,
tokenize=tok_fun, batch_first=True, lower=lowercase,
unk_token=UNK_TOKEN, include_lengths=True)
src_field.vocab = self.src_vocab
# parse test args
batch_size, batch_type, _, device, n_gpu, level, _, \
max_output_length, beam_size, beam_alpha, postprocess, \
bpe_type, sacrebleu, _, _ = parse_test_args(self.cfg, mode="translate")
#Sentence tokenizing: useful in case there are several sentences
nlp = English()
nlp.add_pipe('sentencizer')
doc = nlp(src_input)
sentences = [sent.text.strip() for sent in doc.sents]
if len(sentences)==1:
# every line has to be made into dataset
test_data = _load_line_as_data(line=src_input)
hypotheses = _translate_data(test_data)[:n_best]
return hypotheses[0] if n_best==1 else hypotheses
print("Several sentences are detected. We split and translate them sequentially :).")
hypotheses_dictionary = {}
for i,sentence in enumerate(sentences):
# every line has to be made into dataset
test_data = _load_line_as_data(line=sentence)
hypotheses_dictionary[f"Sentence{i+1}"] = _translate_data(test_data)[0] if n_best==1 else _translate_data(test_data)[:n_best]
return hypotheses_dictionary | AfroTranslate | /AfroTranslate-0.0.6-py3-none-any.whl/afrotranslate/translator.py | translator.py |
Afuzz - An automated web path fuzzing tool
=======
Afuzz is an automated web path fuzzing tool for the Bug Bounty projects.


<a href="https://twitter.com/intent/tweet?text=afuzz-Afuzz is an automated web path fuzzing tool for the Bug Bounty projects.%20by%20@Rapiddns%0A%0Ahttps://github.com/rapiddns/afuzz">

</a>
**Afuzz** is being actively developed by [@rapiddns](https://twitter.com/rapiddns)
## Features
- Afuzz automatically detects the development language used by the website, and generates extensions according to the language
- Uses blacklist to filter invalid pages
- Uses whitelist to find content that bug bounty hunters are interested in in the page
- filters random content in the page
- judges 404 error pages in multiple ways
- perform statistical analysis on the results after scanning to obtain the final result.
- support HTTP2
Installation
------------
```
git clone https://github.com/rapiddns/Afuzz.git
cd Afuzz
python setup.py install
```
OR
```
pip install afuzz
```
Run
------------
```
afuzz -u http://testphp.vulnweb.com -t 30
```
Result
------------
Table
```
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| http://testphp.vulnweb.com/ |
+-----------------------------+---------------------+--------+-----------------------------------+-----------------------+--------+--------------------------+-------+-------+-----------+----------+
| target | path | status | redirect | title | length | content-type | lines | words | type | mark |
+-----------------------------+---------------------+--------+-----------------------------------+-----------------------+--------+--------------------------+-------+-------+-----------+----------+
| http://testphp.vulnweb.com/ | .idea/workspace.xml | 200 | | | 12437 | text/xml | 217 | 774 | check | |
| http://testphp.vulnweb.com/ | admin | 301 | http://testphp.vulnweb.com/admin/ | 301 Moved Permanently | 169 | text/html | 8 | 11 | folder | 30x |
| http://testphp.vulnweb.com/ | login.php | 200 | | login page | 5009 | text/html | 120 | 432 | check | |
| http://testphp.vulnweb.com/ | .idea/.name | 200 | | | 6 | application/octet-stream | 1 | 1 | check | |
| http://testphp.vulnweb.com/ | .idea/vcs.xml | 200 | | | 173 | text/xml | 8 | 13 | check | |
| http://testphp.vulnweb.com/ | .idea/ | 200 | | Index of /.idea/ | 937 | text/html | 14 | 46 | whitelist | index of |
| http://testphp.vulnweb.com/ | cgi-bin/ | 403 | | 403 Forbidden | 276 | text/html | 10 | 28 | folder | 403 |
| http://testphp.vulnweb.com/ | .idea/encodings.xml | 200 | | | 171 | text/xml | 6 | 11 | check | |
| http://testphp.vulnweb.com/ | search.php | 200 | | search | 4218 | text/html | 104 | 364 | check | |
| http://testphp.vulnweb.com/ | product.php | 200 | | picture details | 4576 | text/html | 111 | 377 | check | |
| http://testphp.vulnweb.com/ | admin/ | 200 | | Index of /admin/ | 248 | text/html | 8 | 16 | whitelist | index of |
| http://testphp.vulnweb.com/ | .idea | 301 | http://testphp.vulnweb.com/.idea/ | 301 Moved Permanently | 169 | text/html | 8 | 11 | folder | 30x |
+-----------------------------+---------------------+--------+-----------------------------------+-----------------------+--------+--------------------------+-------+-------+-----------+----------+```
```
Json
```Json
{
"result": [
{
"target": "http://testphp.vulnweb.com/",
"path": ".idea/workspace.xml",
"status": 200,
"redirect": "",
"title": "",
"length": 12437,
"content_type": "text/xml",
"lines": 217,
"words": 774,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/.idea/workspace.xml"
},
{
"target": "http://testphp.vulnweb.com/",
"path": "admin",
"status": 301,
"redirect": "http://testphp.vulnweb.com/admin/",
"title": "301 Moved Permanently",
"length": 169,
"content_type": "text/html",
"lines": 8,
"words": 11,
"type": "folder",
"mark": "30x",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/admin"
},
{
"target": "http://testphp.vulnweb.com/",
"path": "login.php",
"status": 200,
"redirect": "",
"title": "login page",
"length": 5009,
"content_type": "text/html",
"lines": 120,
"words": 432,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/login.php"
},
{
"target": "http://testphp.vulnweb.com/",
"path": ".idea/.name",
"status": 200,
"redirect": "",
"title": "",
"length": 6,
"content_type": "application/octet-stream",
"lines": 1,
"words": 1,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/.idea/.name"
},
{
"target": "http://testphp.vulnweb.com/",
"path": ".idea/vcs.xml",
"status": 200,
"redirect": "",
"title": "",
"length": 173,
"content_type": "text/xml",
"lines": 8,
"words": 13,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/.idea/vcs.xml"
},
{
"target": "http://testphp.vulnweb.com/",
"path": ".idea/",
"status": 200,
"redirect": "",
"title": "Index of /.idea/",
"length": 937,
"content_type": "text/html",
"lines": 14,
"words": 46,
"type": "whitelist",
"mark": "index of",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/.idea/"
},
{
"target": "http://testphp.vulnweb.com/",
"path": "cgi-bin/",
"status": 403,
"redirect": "",
"title": "403 Forbidden",
"length": 276,
"content_type": "text/html",
"lines": 10,
"words": 28,
"type": "folder",
"mark": "403",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/cgi-bin/"
},
{
"target": "http://testphp.vulnweb.com/",
"path": ".idea/encodings.xml",
"status": 200,
"redirect": "",
"title": "",
"length": 171,
"content_type": "text/xml",
"lines": 6,
"words": 11,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/.idea/encodings.xml"
},
{
"target": "http://testphp.vulnweb.com/",
"path": "search.php",
"status": 200,
"redirect": "",
"title": "search",
"length": 4218,
"content_type": "text/html",
"lines": 104,
"words": 364,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/search.php"
},
{
"target": "http://testphp.vulnweb.com/",
"path": "product.php",
"status": 200,
"redirect": "",
"title": "picture details",
"length": 4576,
"content_type": "text/html",
"lines": 111,
"words": 377,
"type": "check",
"mark": "",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/product.php"
},
{
"target": "http://testphp.vulnweb.com/",
"path": "admin/",
"status": 200,
"redirect": "",
"title": "Index of /admin/",
"length": 248,
"content_type": "text/html",
"lines": 8,
"words": 16,
"type": "whitelist",
"mark": "index of",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/admin/"
},
{
"target": "http://testphp.vulnweb.com/",
"path": ".idea",
"status": 301,
"redirect": "http://testphp.vulnweb.com/.idea/",
"title": "301 Moved Permanently",
"length": 169,
"content_type": "text/html",
"lines": 8,
"words": 11,
"type": "folder",
"mark": "30x",
"subdomain": "testphp.vulnweb.com",
"depth": 0,
"url": "http://testphp.vulnweb.com/.idea"
}
],
"total": 12,
"target": "http://testphp.vulnweb.com/"
}
```
Wordlists (IMPORTANT)
---------------
**Summary:**
- Wordlist is a text file, each line is a path.
- About extensions, Afuzz replaces the `%EXT%` keyword with extensions from **-e** flag.If no flag -e, the default is used.
- Generate a dictionary based on domain names. Afuzz replaces %subdomain% with host, %rootdomain% with root domain, %sub% with subdomain, and %domain% with domain. And generated according to %ext%
**Examples:**
- Normal extensions
```
index.%EXT%
```
Passing **asp** and **aspx** extensions will generate the following dictionary:
```
index
index.asp
index.aspx
```
- host
```
%subdomain%.%ext%
%sub%.bak
%domain%.zip
%rootdomain%.zip
```
Passing **https://test-www.hackerone.com** and **php** extension will genrate the following dictionary:
```
test-www.hackerone.com.php
test-www.zip
test.zip
www.zip
testwww.zip
hackerone.zip
hackerone.com.zip
```
Options
-------
```
# ###### ### ### ###### ######
# # # # # # # # #
# # # # # # # # # #
# # ### # # # #
# # # # # # # #
##### # # # # # # #
# # # # # # # # #
### ### ### ### ###### ######
usage: afuzz [options]
An Automated Web Path Fuzzing Tool.
By RapidDNS (https://rapiddns.io)
options:
-h, --help show this help message and exit
-u URL, --url URL Target URL
-o OUTPUT, --output OUTPUT
Output file
-e EXTENSIONS, --extensions EXTENSIONS
Extension list separated by commas (Example: php,aspx,jsp)
-t THREAD, --thread THREAD
Number of threads
-d DEPTH, --depth DEPTH
Maximum recursion depth
-w WORDLIST, --wordlist WORDLIST
wordlist
-f, --fullpath fullpath
-p PROXY, --proxy PROXY
proxy, (ex:http://127.0.0.1:8080)
```
How to use
---------------
Some examples for how to use Afuzz - those are the most common arguments. If you need all, just use the **-h** argument.
### Simple usage
```
afuzz -u https://target
```
```
afuzz -e php,html,js,json -u https://target
```
```
afuzz -e php,html,js -u https://target -d 3
```
### Threads
The thread number (**-t | --threads**) reflects the number of separated brute force processes. And so the bigger the thread number is, the faster afuzz runs. By default, the number of threads is 10, but you can increase it if you want to speed up the progress.
In spite of that, the speed still depends a lot on the response time of the server. And as a warning, we advise you to keep the threads number not too big because it can cause DoS.
```
afuzz -e aspx,jsp,php,htm,js,bak,zip,txt,xml -u https://target -t 50
```
----
### Blacklist
The **blacklist.txt** and **bad_string.txt** files in the /db directory are blacklists, which can filter some pages
The **blacklist.txt** file is the same as dirsearch.
The **bad_stirng.txt** file is a text file, one per line. The format is position==content. With == as the separator, position has the following options: header, body, regex, title
----
### Language detection
The **language.txt** is the detection language rule, the format is consistent with **bad_string.txt**. Development language detection for website usage.
References
---------------
Thanks to open source projects for inspiration
- [Dirsearch](ttps://github.com/maurosoria/dirsearch) by by Shubham Sharma
- [wfuzz](https://github.com/xmendez/wfuzz) by Xavi Mendez
- [arjun](https://github.com/s0md3v/Arjun) by Somdev Sangwan
| Afuzz | /Afuzz-0.1.12.tar.gz/Afuzz-0.1.12/README.md | README.md |
import os
from pathlib import Path
import platform
from afuzz.utils.common import compatible_path
import afuzz.lib.config as mem
#VERSION = "0.1.6"
DEFAULT_HEADERS = {
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
"accept": "*/*",
"accept-encoding": "*",
"keep-alive": "timeout=30, max=1000",
"cache-control": "max-age=0",
"Range": "bytes=0-1024000"
}
current_dir = Path(__file__).resolve().parent.parent.parent
afuzz_dir = compatible_path(mem.__file__.replace(compatible_path('/lib/config.py'), ''))
#DATA = str(Path(__file__).parent.resolve() / "db")
DATA = str(Path(__file__).parent.resolve() / "db")
PLATFORM_SYSTEM = platform.system()
NEW_LINE = os.linesep
DEFAULT_ENCODING = "utf-8"
COMMON_EXTENSIONS = ["html", "htm", "js"]
BACKUP_EXTENSIONS = ["zip", "rar", "tar", "tar.gz", "war", "jar", "tar.bz2", "sql", "bak"]
CONFIG_EXTENSIONS = ["conf", "config", "log", "properties", "ini", "json", "txt", "xml", "yaml", "yml"]
ASPX_EXTENSIONS = ["aspx", "ashx", "asmx", "dll"]
PHP_EXTENSIONS = ["php", "php5", "php7", "inc"]
JAVA_EXTENSIONS = ["do", "action", "jsp", "java", "jspx"]
OTHER_EXTENSIONS = ["sh", "bat", "cgi", "go"]
WS_EXTENSIONS = ["wsdl", "asmx", "asdl", "jws"]
PYTHON_EXTENSIONS = ["py", "pyc"]
MEDIA_EXTENSIONS = [
"webm", "mkv", "avi", "ts", "mov", "qt", "amv", "mp4", "m4p", "m4v", "mp3", "swf", "mpg", "mpeg", "jpg", "jpeg",
"pjpeg", "png", "woff", "woff2", "svg", "webp", "bmp", "pdf", "wav", "vtt"]
# EXCLUDE_OVERWRITE_EXTENSIONS = MEDIA_EXTENSIONS + ("axd", "cache", "coffee", "conf", "config", "css", "dll", "lock", "log", "key", "pub", "properties", "ini", "jar", "js", "json", "toml", "txt", "xml", "yaml", "yml")
CRAWL_ATTRIBUTES = ["action", "cite", "data", "formaction", "href", "longdesc", "poster", "src", "srcset", "xmlns"]
CRAWL_TAGS = [
"a", "area", "base", "blockquote", "button", "embed", "form", "frame", "frameset", "html", "iframe", "input", "ins",
"noframes", "object", "q", "script", "source"]
AUTHENTICATION_TYPES = ["basic", "digest", "bearer", "ntlm", "jwt"]
ROBOTS_TXT_REGEX = r"(?:Allow|Disallow): /(.*)"
ITER_CHUNK_SIZE = 1024 * 1024
MAX_RESPONSE_SIZE = 1000 * 1024 * 1024
UNKNOWN = "unknown"
TEXT_CHARS = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7F})
DEFAULT_404 = "gltetAc0012adddalmz/19lsjfo.html"
FOLDER_404 = "nt4ffaoxewps/ablzqqqo/eelsqqwlz"
DOT_404 = ".ADMINDDDD"
WAF_404 = '?tag="><script>alert(/test/);</script>' | Afuzz | /Afuzz-0.1.12.tar.gz/Afuzz-0.1.12/afuzz/settings.py | settings.py |
import re
import hashlib
from urllib.parse import urlparse
from afuzz.utils.mimetype import MimeTypeUtils, guess_mimetype
from afuzz.settings import DEFAULT_ENCODING, UNKNOWN, MAX_RESPONSE_SIZE, ITER_CHUNK_SIZE
from afuzz.lib.dictionary import Dictionary
from afuzz.utils.common import CaseInsensitiveDict
class Response:
def __init__(self, response):
self.url = response.url
self.status = response.status_code
self.headers = CaseInsensitiveDict(response.headers)
self.redirect = self.headers.get("location") or ""
self.history = [res.url for res in response.history]
self.content = ""
self.body = response.content
self.text = response.text
self.title = self.page_title()
if not MimeTypeUtils.is_binary(self.body):
self.content = self.body.decode(
response.encoding or DEFAULT_ENCODING, errors="ignore"
)
lang_dict = Dictionary(list_type="language")
result, language, match_str = lang_dict.match(response)
if result:
self.language = language
else:
self.language = UNKNOWN
def page_title(self):
tt = re.search("<title.*?>(.*?)</title>", self.content, re.IGNORECASE | re.DOTALL)
try:
page_title = tt.group(1)
except Exception:
return ''
if page_title:
return page_title.strip()
@property
def raw_header(self):
header = []
header = ["%s: %s" % (key, value) for key, value in self.headers.items()]
return "\n".join(header)
@property
def type(self):
if "content-type" in self.headers:
return self.headers.get("content-type").split(";")[0]
body_type = guess_mimetype(self.body)
if body_type != "text/plain":
return body_type
return UNKNOWN
@property
def length(self):
try:
print(self.headers.get("content-length"))
print(int(self.headers.get("content-length")))
l = int(self.headers.get("content-length"))
#print(l)
except TypeError:
l = len(self.body)
#print("typeerror")
#print(l)
return l
@property
def lines(self):
body_line = len(self.content.split("\n"))
if body_line < 1:
body_line2 = len(self.content.split("\r"))
return body_line2
else:
return body_line
def md5(self, content=None):
m = hashlib.md5()
if content:
m.update(content.encode())
else:
m.update(self.body)
return m.hexdigest()
@property
def words(self):
regex = re.compile(r"\S+", re.I+re.DOTALL)
return len(regex.findall(self.content))
def clean_page(self, path=None):
patterns = [
r"[a-f\d]{4}(?:[a-f\d]{4}-){4}[a-f\d]{12}",
r"[0-9]{4}[-][0-9]{1,2}[-][0-9]{1,2}.\d\d:\d\d:\d\d(\.\d+)?Z?",
r"[0-9]{4}[-][0-9]{1,2}[-][0-9]{1,2}",
r"[0-9]{4}[/][0-9]{1,2}[/][0-9]{1,2}",
r"<!--.+-->"
]
'''
examples
content = "test_ e155518c-ca1b-443c-9be9-fe90fdab7345, 41E3DAF5-6E37-4BCC-9F8E-0D9521E2AA8D, 00000000-0000-0000-0000-000000000000"
content += "2020-10-22T07:56:07.867Z,,,,asdasdasn"
content += "2023-01-27 10:21:39Z"
content += "33bb81a8-f625-4d38-8502-a6c192890ad2" + aabcd1llmzn"
content += "64d56471-807d-41d8-a331-67e38c1bbd8c"
'''
content = self.content
if "application/" in self.type and "application/json" not in self.type:
return content
for pattern in patterns:
regex = re.compile(pattern, re.I)
content = re.sub(regex, "", content)
if self.type == "application/json":
regex = re.compile(r"\d{10,13}")
content = re.sub(regex, "", content)
url = str(self.url)
content = content.replace(url, "")
path = urlparse(url).path
if path:
content = content.replace(path, "")
return content | Afuzz | /Afuzz-0.1.12.tar.gz/Afuzz-0.1.12/afuzz/lib/response.py | response.py |
import re
import os
from urllib.parse import urlparse
import asyncio
import httpx
import time
from tqdm import tqdm
from afuzz.utils.common import CaseInsensitiveDict, is_ip
from afuzz.settings import DEFAULT_HEADERS
from afuzz.lib.dictionary import Dictionary
from afuzz.settings import PHP_EXTENSIONS, ASPX_EXTENSIONS, JAVA_EXTENSIONS, COMMON_EXTENSIONS, MEDIA_EXTENSIONS, \
BACKUP_EXTENSIONS, CONFIG_EXTENSIONS, PYTHON_EXTENSIONS, OTHER_EXTENSIONS, WS_EXTENSIONS, UNKNOWN, DATA
from afuzz.lib.response import Response
from afuzz.settings import DOT_404, DEFAULT_404, FOLDER_404, WAF_404
from afuzz.lib.result import FuzzResult
from afuzz.utils.common import compatible_path
class Fuzzer:
def __init__(self, options, *args, **kwargs):
self.options = options
self._target = options.get("target", "")
self.depth = options.get("depth", 0)
self.headers = CaseInsensitiveDict(DEFAULT_HEADERS)
self.proxy = options.get("proxy", None)
if self.proxy:
self.session = httpx.AsyncClient(headers=self.headers, verify=False, follow_redirects=False, timeout=60,
http2=True, proxies=self.proxy)
else:
self.session = httpx.AsyncClient(headers=self.headers, verify=False, follow_redirects=False, timeout=60,
http2=True)
self.dict = []
self.baddict = Dictionary(list_type="badstr")
self.blacklist = Dictionary(list_type="blacklist")
self.whithlist = Dictionary(list_type="whitelist")
self.page_404 = None
self.folder_404 = None
self.dot_404 = None
self.page_index = None
#self.url_parse = urlparse(self._target)
self.targets_queue = asyncio.Queue()
self.result_queue = asyncio.Queue()
self.stop_flag = False
fullpath = options.get("fullpath")
self.result = FuzzResult(self._target, options.get("output", ""), fullpath)
self.waf_404 = None
self.scanner_queue = []
self.stop_count = 0
self.prossbar = None
self.wordlist = options.get("wordlist", "")
async def send_msg(self, msg_type, msg_content):
await self.result_queue.put({"type": msg_type, "content": msg_content})
async def check_vuln(self, response, url, path, depth, target=None):
wl_match_result, wl_match_pos, wl_match_str = self.whithlist.match(response)
bl_match_result, bl_match_pos, bl_match_str = self.blacklist.match(response, path)
reresult_404 = self.check_404_re(response)
result_404 = self.check_404_page(response)
if response.status == 400:
return False
if response.status == 429:
print("The request is too fast, please reduce the number of threads.")
return False
# print(resp.status)
redirect = response.redirect.strip()
# Exclude if it jumps to the homepage of the website
if not target:
target = self._target
index_url = target.replace(":443/", "/").replace(":80/", "/")
if index_url.startswith("https://"):
index_url_protocol = index_url.replace("https://", "http://")
else:
index_url_protocol = index_url.replace("http://", "https://")
if redirect == index_url or redirect == index_url_protocol or redirect == "/":
return False
# If it hits the whitelist, return
if wl_match_result:
find_type = "whitelist"
mark = wl_match_str
await self.send_msg("vuln", (find_type, url, path, response, mark, depth, target))
return True
# Skip if hit blacklist
# print(self.baddict.match(resp)[0])
if self.baddict.match(response)[0] == True:
return False
# Determine folder path
new_url = url.replace(":443/", "/").replace(":80/", "/")
if new_url.startswith("https://"):
new_url_protocol = new_url.replace("https://", "http://")
else:
new_url_protocol = new_url.replace("http://", "https://")
if (not new_url.endswith("/") and (redirect == new_url + "/" or redirect == new_url_protocol + "/")) \
or redirect == "/" + path + "/":
tqdm.write("%s 30x" % url)
find_type = "folder"
mark = "30x"
await self.send_msg("vuln", (find_type, url, path, response, mark, depth, target))
return True
# Check blacklist content
if bl_match_result or result_404 or reresult_404:
return False
if new_url[:-1] == redirect and new_url.endswith("/") and response.status != 403:
return False
if new_url.endswith("/") and response.status == 403 and self.folder_404.status != 403:
tqdm.write("%s 403" % url)
find_type = "folder"
mark = "403"
await self.send_msg("vuln", (find_type, url, path, response, mark, depth, target))
return True
# all checks passed
find_type = "check"
mark = ""
await self.send_msg("vuln", (find_type, url, path, response, mark, depth, target))
return True
async def save_result(self):
# scan_result = FuzzResult(self._target)
while not self.stop_flag or self.result_queue.qsize() > 0:
if self.result_queue.qsize() == 0:
await asyncio.sleep(0.1)
continue
while self.result_queue.qsize() > 0:
msg = await self.result_queue.get()
if msg["type"] == "msg":
tqdm.write(msg["content"])
else:
find_type, url, path, resp, mark, depth, target = msg["content"]
# check depth
if self.depth > depth + 1 and find_type == "folder":
tqdm.write("\n%s (Add queue)" % url)
await self.produce(url, depth=depth + 1)
# await self.scanner_queue.put((url, depth + 1))
self.result.add(resp, path, find_type, mark, target=target, depth=depth)
# print(scan_result.output())
async def produce(self, target=None, depth=0):
if not target:
target = self._target
if not target.endswith("/"):
target = target + "/"
if target not in self.scanner_queue:
self.scanner_queue.append(target)
else:
return True
##if depth > 0:
# print(target)
for path in self.dict:
await self.targets_queue.put({"target": target, "path": path, "depth": depth})
for _ in range(self.options["threads"]):
await self.targets_queue.put({"target": "end", "path": "", "depth": depth})
self.processbar.total = self.targets_queue.qsize()
async def consume(self):
status_50x = 0
timeout_count = 0
while self.targets_queue.qsize() > 0:
target = await self.targets_queue.get()
if target["target"] == "end" and self.depth == 0:
# wait for task to complete
for _ in range(2):
await asyncio.sleep(1)
if status_50x > 10:
await asyncio.sleep(0.1)
break
path = target["path"]
url = target["target"] + path
depth = target["depth"]
if timeout_count >= 10:
break
for _ in range(3):
try:
# read timeout
resp = Response(await self.session.get(url))
# self.processbar.update(self.process)
break
except TimeoutError:
timeout_count += 1
asyncio.sleep(2)
continue
except Exception as e:
timeout_count += 1
if self.proxy:
self.session = httpx.AsyncClient(headers=self.headers, verify=False, follow_redirects=False,
timeout=60, http2=True, proxies=self.proxy)
else:
self.session = httpx.AsyncClient(headers=self.headers, verify=False, follow_redirects=False,
timeout=60, http2=True)
#self.session = httpx.AsyncClient(headers=self.headers, verify=False, follow_redirects=False,
# timeout=60, http2=True)
resp = None
continue
if not resp:
continue
if resp and resp.status > 501: # 502 and above problems are more than 5 times, then end
status_50x += 1
continue
await self.check_vuln(resp, url, path, depth, target=target["target"])
self.processbar.update(1)
def get_exts(self, custom=None):
if custom:
exts = custom
else:
language = self.page_index.language
if language == "aspx":
ext = ASPX_EXTENSIONS
elif language == "php":
ext = PHP_EXTENSIONS
elif language == "java":
ext = JAVA_EXTENSIONS
elif language == "python":
ext = PYTHON_EXTENSIONS
elif language == UNKNOWN:
ext = ASPX_EXTENSIONS + PHP_EXTENSIONS + JAVA_EXTENSIONS + PYTHON_EXTENSIONS
exts = ext + COMMON_EXTENSIONS + CONFIG_EXTENSIONS + OTHER_EXTENSIONS + WS_EXTENSIONS
return exts
async def start(self):
# 1. Determine the language selection dictionary
# 2. Get 404 page content
# 3. Create a scanning coroutine
# 4. Create a result display and save coroutine.
await self.get_index()
ext = []
if not self.page_index:
print("Failed to access url!")
return True
# result_file = open("./result/" + self.result_filename, "w", encoding='utf-8')
language = self.page_index.language
await self.send_msg("msg", "language: %s " % language)
exts = self.get_exts(self.options["exts"])
if urlparse(self._target).netloc:
subdomain = urlparse(self._target).netloc.split(":", 1)[0]
if is_ip(subdomain):
subdomain = ""
else:
subdomain = ""
print("Generating dictionary...")
if self.wordlist:
dict = Dictionary(files=[self.wordlist], extensions=exts)
self.dict = dict
else:
dict = Dictionary(subdomain=subdomain, extensions=exts)
if not self.options["exts"]:
back_dict = Dictionary(subdomain=subdomain, files=[compatible_path(DATA + "/backup.txt")],
extensions=BACKUP_EXTENSIONS)
#self.dict = list(set(self.dict.items() + back_dict.items()))
self.dict = dict + back_dict
else:
self.dict = dict
self.total = len(self.dict)
#self.prossbar = tqdm(self.total)
self.processbar = tqdm(range(self.total), dynamic_ncols=True, desc="scanner")
tqdm.write("A total of %d entries in the dictionary" % self.total)
tqdm.write("Start getting 404 error pages")
await self.get_404_page("file")
await self.get_404_page("folder")
await self.get_404_page("dot")
await self.get_404_page("waf")
tqdm.write("Get 404 page complete")
tqdm.write("Create scan tasks")
asyncio.create_task(self.save_result())
try:
asyncio.create_task(self.produce())
all_process = [self.consume() for _ in range(self.options["threads"])]
await asyncio.gather(*all_process)
except KeyboardInterrupt as e:
self.stop_flag = True
await self.send_msg("msg", "Scan aborted by user")
exit(-1)
except Exception as e:
import traceback
traceback.print_exc()
await self.send_msg("msg", "[__main__.exception] %s %s" % (type(e), str(e)))
#self.prossbar.close()
# Analyze scanned results, display and write final results after processing
self.result.analysis()
print(self.result.table)
# self.result.save_table()
self.result.save()
self.stop_flag = True
async def get_404_page(self, notfound_type="file"):
# DOT_404, DEFAULT_404, FOLDER_404
if notfound_type == "file":
path = DEFAULT_404
elif notfound_type == "folder":
path = FOLDER_404
elif notfound_type == "dot":
path = DOT_404
elif notfound_type == "waf":
path = WAF_404
try:
page404_resp = Response(await self.session.get(self._target + path))
except:
page404_resp = None
if notfound_type == "file":
self.page_404 = page404_resp
elif notfound_type == "folder":
self.folder_404 = page404_resp
elif notfound_type == "dot":
self.dot_404 = page404_resp
elif notfound_type == "waf":
self.waf_404 = page404_resp
return page404_resp
async def get_index(self):
try:
index_resp = await self.session.get(self._target)
self.page_index = Response(index_resp)
print(self.page_index.title)
except Exception as e:
import traceback
traceback.print_exc()
self.page_index = None
return self.page_index
# self.page_index = Response(await self.session.get(self._target))
def check_404_re(self, response):
regex = "404 (Page )*[nN]ot [fF]ound|[Ss]erver error|<title>404</title>"
if re.findall(regex, response.content, re.I):
return True
else:
return False
def check_404_page(self, response):
if response.status == 404:
return True
temp_404 = self.clean_page(self.page_404, DEFAULT_404)
temp_page = self.clean_page(response)
temp2_404 = self.clean_page(self.dot_404, DOT_404)
folder_404 = self.clean_page(self.folder_404, FOLDER_404)
if self.page_404.status == response.status and self.page_404.md5(temp_404) == response.md5(temp_page):
return True
elif self.dot_404.status == response.status and self.dot_404.md5(temp2_404) == response.md5(temp_page):
return True
elif self.folder_404.status == response.status and self.folder_404.md5(folder_404) == response.md5(temp_page):
return True
elif self.page_404.status == response.status and \
self.page_404.lines == response.lines and self.page_404.title == response.title:
return True
elif self.folder_404.status == response.status and \
self.folder_404.lines == response.lines and self.folder_404.title == response.title:
return True
elif self.dot_404.status == response.status and \
self.dot_404.lines == response.lines and self.dot_404.title == response.title:
return True
elif self.waf_404.status == response.status and \
self.waf_404.lines == response.lines and self.waf_404.title == response.title:
return True
elif self.page_index.status == response.status and \
self.page_index.lines == response.lines and self.page_index.title == response.title:
return True
else:
return False
def clean_page(self, response, path=None):
patterns = [
r"[a-f\d]{4}(?:[a-f\d]{4}-){4}[a-f\d]{12}",
r"[0-9]{4}[-][0-9]{1,2}[-][0-9]{1,2}.\d\d:\d\d:\d\d(\.\d+)?Z?",
r"[0-9]{4}[-][0-9]{1,2}[-][0-9]{1,2}",
r"[0-9]{4}[/][0-9]{1,2}[/][0-9]{1,2}",
r"<!--.+-->"
]
'''
匹配的例子
content = "test_ e155518c-ca1b-443c-9be9-fe90fdab7345, 41E3DAF5-6E37-4BCC-9F8E-0D9521E2AA8D, 00000000-0000-0000-0000-000000000000"
content += "2020-10-22T07:56:07.867Z,,,,asdasdasn"
content += "2023-01-27 10:21:39Z"
content += "33bb81a8-f625-4d38-8502-a6c192890ad2" + aabcd1llmzn"
content += "64d56471-807d-41d8-a331-67e38c1bbd8c"
'''
if response:
content = response.content
for pattern in patterns:
regex = re.compile(pattern, re.I)
content = re.sub(regex, "", content)
if response.type == "application/json":
regex = re.compile(r"\d{10,13}")
content = re.sub(regex, "", content)
url = str(response.url)
content = content.replace(url, "")
content = content.replace(self._target, "")
path = urlparse(url).path
# print(path)
if path:
content = content.replace(path, "")
return content
return "" | Afuzz | /Afuzz-0.1.12.tar.gz/Afuzz-0.1.12/afuzz/lib/fuzzer.py | fuzzer.py |
import re
import tldextract
from afuzz.settings import CONFIG_EXTENSIONS, COMMON_EXTENSIONS, JAVA_EXTENSIONS, PHP_EXTENSIONS, ASPX_EXTENSIONS, \
BACKUP_EXTENSIONS, OTHER_EXTENSIONS
from afuzz.utils.common import CaseInsensitiveDict, compatible_path
from afuzz.settings import DATA
class Dictionary:
def __init__(self, **kwargs):
self._index = 0
self._items = self.generate(**kwargs)
self.type = kwargs.get("list_type", None)
@property
def index(self):
return self._index
def __next__(self):
try:
path = self._items[self._index]
except IndexError:
raise StopIteration
self._index += 1
return path
def __contains__(self, item):
return item in self._items
def __getstate__(self):
return (self._items, self._index)
def __setstate__(self, state):
self._items, self._index = state
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __add__(self, other):
new_items = self._items + other._items
return self.__class__(list_type="add", items=new_items)
def items(self):
return self._items
def read_list(self, filename):
result_list = []
with open(filename, "r", encoding="utf-8") as list_file:
for line in list_file:
line = line.strip()
if line and "==" in line:
key, value = line.split("==", 1)
result_list.append((key, value))
return result_list
def generate(self, subdomain="", files=[], extensions=[], list_type="path",items=[]):
wordlist = []
if list_type == "add":
wordlist = list(set(items))
return wordlist
if not files and list_type == "path":
for name in ["dict.txt"]:
files.append(compatible_path(DATA + "/" + name))
#files = [DATA + "/api.txt"]
if list_type == "path":
tld_res = tldextract.extract(subdomain)
domain = tld_res.domain
root_domain = domain + "." + tld_res.suffix
sub = subdomain.rstrip(root_domain)
subs = []
if "." in sub:
subnames = sub.split(".")
for subname in subnames:
if "-" in subname:
for temp in subname.split("-"):
if temp not in subs:
subs.append(temp)
else:
subs.append(subname)
sub_all = "".join(subs)
sub_all_a = "-".join(subs)
sub_all_b = "_".join(subs)
subs.append(sub_all)
subs.append(sub_all_a)
subs.append(sub_all_b)
if sub not in subs:
subs.append(sub)
new_wordlist = []
for filename in files:
with open(filename, "r", encoding="utf=8") as dict_file:
for line in dict_file:
line = line.strip()
temp_list = []
if "%subdomain%" in line and subdomain:
temp_list.append(line.replace("%subdomain%", subdomain))
elif "%domain%" in line and domain:
temp_list.append(line.replace("%domain%", domain))
elif "%rootdomain%" in line and root_domain:
temp_list.append(line.replace("%rootdomain%", root_domain))
elif "%sub%" in line and subs:
for subname in subs:
temp_list.append(line.replace("%sub%", subname))
else:
temp_list.append(line)
for temp in temp_list:
if temp not in new_wordlist:
new_wordlist.append(temp)
for line in new_wordlist:
if "%ext%" in line or "%EXT%" in line:
for ext in extensions:
dict_text = line.replace("%ext%".upper(), ext)
dict_text = dict_text.replace("%ext%", ext)
if dict_text not in wordlist:
wordlist.append(dict_text)
elif line not in wordlist:
wordlist.append(line)
elif list_type == "badstr":
filepath = compatible_path(DATA + "/bad_strings.txt")
elif list_type == "whitelist":
filepath = compatible_path(DATA + "/whitelist.txt")
elif list_type == "blacklist":
filepath = compatible_path(DATA + "/blacklist.txt")
elif list_type == "language":
filepath = compatible_path(DATA + "/language.txt")
if list_type != "path":
wordlist = self.read_list(filepath)
return wordlist
def match(self, response, path=None):
ret = (False, None, None)
headers = CaseInsensitiveDict(response.headers)
for pos, match_str in self._items:
pos = pos.lower()
match_str = match_str.lower()
if self.type != "language":
if response.page_title():
title = response.page_title().lower()
else:
title = ""
else:
title = ""
content = response.content.lower()
if pos == "title":
if match_str in title or match_str == title:
ret = (True, pos, match_str)
elif pos == "body":
if match_str in content:
ret = (True, pos, match_str)
elif pos == "regex":
regex = re.compile(match_str, re.I)
match = regex.match(str(headers).lower() + content)
if match:
ret = (True, pos, match_str)
elif pos in ["400", "403", "500"]:
if match_str == path:
ret = (True, pos, match_str)
elif pos == "header":
for _, value in headers.items():
if match_str.lower() in value.lower():
ret = (True, pos, match_str)
elif pos == "cookie":
if match_str in str(response.cookie).lower():
ret = (True, pos, match_str)
elif pos in ["php", "aspx", "java", "python"]:
for _, value in headers.items():
if match_str in value.lower():
ret = (True, pos, match_str)
#if match_str in response.body:
# ret = (True, pos)
return ret | Afuzz | /Afuzz-0.1.12.tar.gz/Afuzz-0.1.12/afuzz/lib/dictionary.py | dictionary.py |
import os
import json
from urllib.parse import urlparse
from prettytable import PrettyTable
import pandas as pd
class FuzzResult:
def __init__(self, *args, **kwargs):
self.target = args[0]
self.result = []
self._result = []
self.table = PrettyTable()
self.table.title = self.target
self.save_filename = self.target.replace(":", "_").replace("/", "_")
self.last_result = {"result": [], "total": 0, "target": self.target}
self.opt_output = args[1]
self.fullpath = args[2]
if self.fullpath:
self.table.field_names = ["url", "status", "redirect", "title", "length", "content-type", "lines",
"words", "type", "mark"]
else:
self.table.field_names = ["target", "path", "status", "redirect", "title", "length", "content-type", "lines",
"words", "type", "mark"]
# self.row_title = ["target","path","status","title","length","lines","words","type","mark"]
def add(self, response, path, find_type, mark, target=None, depth=0):
title = response.page_title()
lines = response.lines
words = response.words
status = str(response.status)
subdomain = urlparse(target).netloc.split(":", 1)[0]
url = target + path
if url not in self._result:
self._result.append(url)
if "application/" in response.type and "application/json" not in response.type:
length = response.length
else:
length = len(response.clean_page())
self.result.append(
{
"target": target,
"path": path,
"status": response.status,
"redirect": response.redirect,
"title": title,
"length": length,
"content_type": response.type,
"lines": lines,
"words": words,
"type": find_type,
"mark": mark,
"subdomain": subdomain,
"depth": depth,
"url": url
}
)
def output(self):
if self.last_result["total"] > 0:
print(self.table)
def save(self):
folder = "/".join(self.opt_output.split("/")[:-1])
if not os.path.exists(folder):
try:
os.mkdir(folder)
except:
print("mkdir error")
#return False
if self.last_result["total"] > 0:
with open("%s.json" % self.opt_output, "w", encoding="utf-8") as save_file:
save_file.write(json.dumps(self.last_result))
def save_table(self):
if not os.path.exists(self.output):
try:
os.mkdir(self.output)
except:
print("mkdir error")
return False
if self.last_result["total"] > 0:
with open("%s_%d.txt" % (self.output+"/"+self.save_filename, self.last_result["total"]), "w", encoding="utf-8") \
as save_file:
save_file.write(self.target + "\n")
save_file.write(self.table.get_string())
def analysis(self):
print("\nStart analyzing scan results...")
result_list = self.result
if result_list:
result_df = pd.DataFrame(result_list)
total = len(result_list)
if total > 20:
data_group = result_df.groupby(['type', 'status', 'length', 'lines'])
for dp, value in data_group.groups.items():
#print(dp)
find_type, status, length, lines = dp
dp_len = len(value)
prefect = dp_len / total * 100
if dp_len < 20:
rows = result_df[(result_df["type"] == find_type) & (result_df["status"] == status) & (
length == result_df["length"])]
for index, row in rows.iterrows():
total = total - 1
print(row)
row_list = row.to_list()
if self.fullpath:
self.table.add_row([row_list[-1]] + row_list[2:11])
else:
self.table.add_row(row.to_list()[0:-3])
#self.table.add_row(row.to_list()[0:-3])
self.last_result["result"].append(row.to_dict())
else:
# ["path", "redirect", "status", "title", "length","content-type", "lines", "words", "type", "mark"]
for data in result_list:
if self.fullpath:
self.table.add_row([data["url"], data["status"], data["redirect"], data["title"],
data["length"], data["content_type"], data["lines"], data["words"],
data["type"], data["mark"]])
else:
self.table.add_row(
[data["target"], data["path"], data["status"], data["redirect"], data["title"],
data["length"], data["content_type"], data["lines"], data["words"],
data["type"], data["mark"]])
self.last_result["result"] += result_list
self.last_result["total"] = len(self.last_result["result"])
print("Results analysis complete!")
#If the records in the remaining results are still greater than 20 after grouping, judge and filter. Only how much is kept. | Afuzz | /Afuzz-0.1.12.tar.gz/Afuzz-0.1.12/afuzz/lib/result.py | result.py |
import re
from Basic_Math import Elemental_calculation_decimal as Ecd
class DividedByZeroError(Exception):
def __init__(self):
self.__str = '<ERROR> You can\'t divide with zero!'
def __str__(self):
print(self.__str)
def add(args_list):
ret = 0
round = 0
for arg in args_list:
if re.match('[0-9]+\\.[0-9]+', arg) is not None:
ret = Ecd.add([str(ret), arg], round)
continue
ret += int(arg)
return ret
def minus(arg1, arg2):
return int(arg1) - int(arg2)
def multiply(args_list):
ret = 1
for arg in args_list:
ret *= int(arg)
return ret
class division:
def arg2iszero(self):
if self.__arg2 == 0:
raise DividedByZeroError
def arg1iszero(self):
if self.__arg1 == 0:
return 0
def __init__(self, arg1, arg2):
self.__arg1 = int(arg1)
self.__arg2 = int(arg2)
def div_num(self):
self.arg2iszero()
self.arg1iszero()
return self.__arg1 / self.__arg2
def div_fra(self):
self.arg2iszero()
self.arg1iszero()
sign = self.__arg1 * self.__arg2
if self.__arg1 < 0:
self.__arg1 *= -1
if self.__arg2 < 0:
self.__arg2 *= -1
__mod = self.__arg1 % self.__arg2
__int = self.__arg1 // self.__arg2
__fake_upper = __mod + __int * self.__arg2
__ret = ''
if sign < 0:
__ret += '-'
choice = input('Do you want to print fake fracture or true fracture? input "Y" to get true fracture: ')
if choice != 'Y' and choice != 'y':
if __mod != 0:
__ret += str(__fake_upper) + '/' + str(self.__arg2)
elif __mod == 0:
__ret += str(__int)
else:
if self.__arg1 > self.__arg2 and __mod != 0:
__ret += str(__int) + '(' + str(__mod) + '/' + str(self.__arg2) + ')'
elif self.__arg1 < self.__arg2:
__ret += str(__mod) + '/' + str(self.__arg2)
else:
__ret += str(__int)
return __ret | AgCl-s-Math | /AgCl's_Math-0.1.1.tar.gz/AgCl's_Math-0.1.1/src/Basic_Math/Elemental_calculation_integer.py | Elemental_calculation_integer.py |
📦 setup.py (for humans)
=======================
This repo exists to provide [an example setup.py] file, that can be used
to bootstrap your next Python project. It includes some advanced
patterns and best practices for `setup.py`, as well as some
commented–out nice–to–haves.
For example, this `setup.py` provides a `$ python setup.py upload`
command, which creates a *universal wheel* (and *sdist*) and uploads
your package to [PyPi] using [Twine], without the need for an annoying
`setup.cfg` file. It also creates/uploads a new git tag, automatically.
In short, `setup.py` files can be daunting to approach, when first
starting out — even Guido has been heard saying, "everyone cargo cults
thems". It's true — so, I want this repo to be the best place to
copy–paste from :)
[Check out the example!][an example setup.py]
Installation
-----
```bash
cd your_project
# Download the setup.py file:
# download with wget
wget https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py -O setup.py
# download with curl
curl -O https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py
```
To Do
-----
- Tests via `$ setup.py test` (if it's concise).
Pull requests are encouraged!
More Resources
--------------
- [What is setup.py?] on Stack Overflow
- [Official Python Packaging User Guide](https://packaging.python.org)
- [The Hitchhiker's Guide to Packaging]
- [Cookiecutter template for a Python package]
License
-------
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any means.
[an example setup.py]: https://github.com/navdeep-G/setup.py/blob/master/setup.py
[PyPi]: https://docs.python.org/3/distutils/packageindex.html
[Twine]: https://pypi.python.org/pypi/twine
[image]: https://farm1.staticflickr.com/628/33173824932_58add34581_k_d.jpg
[What is setup.py?]: https://stackoverflow.com/questions/1471994/what-is-setup-py
[The Hitchhiker's Guide to Packaging]: https://the-hitchhikers-guide-to-packaging.readthedocs.io/en/latest/creation.html
[Cookiecutter template for a Python package]: https://github.com/audreyr/cookiecutter-pypackage
| AgNO3 | /AgNO3-0.0.2.tar.gz/AgNO3-0.0.2/README.md | README.md |
import threading
# I've tried django RWLock. It is very slow, so I prefer
# usual threading.Lock. Yes, it gives exclusive access for readers,
# but faster for five times.
#from agatsuma.third_party.rwlock import RWLock
class MiniCache(object):
"""Implements thread-safe dict-based cache. Intended only for internal usage.
"""
def __init__(self):
self._dict = {}
self._lock = threading.Lock()
#self._lock = RWLock()
def set(self, key, value):
#self._lock.writer_enters()
self._lock.acquire()
try:
self._dict[key] = value
finally:
#self._lock.writer_leaves()
self._lock.release()
def get(self, key):
#self._lock.reader_enters()
self._lock.acquire()
try:
return self._dict[key]
finally:
#self._lock.reader_leaves()
self._lock.release()
def cleanup(self):
#self._lock.writer_enters()
self._lock.acquire()
try:
self._dict = {}
finally:
#self._lock.writer_leaves()
self._lock.release()
def remove(self, key):
#self._lock.writer_enters()
self._lock.acquire()
try:
if key in self._dict:
del self._dict[key]
finally:
#self._lock.writer_leaves()
self._lock.release()
def has_key(self, key):
#self._lock.reader_enters()
self._lock.acquire()
try:
return key in self._dict
finally:
#self._lock.reader_leaves()
self._lock.release()
class EternalInvariantHelper(object):
"""Decorator intended to speed-up absolute invariant functions
(they always return same result and haven't side effects).
Caching is based on args, kwargs are not accounted so wrapped
function should be invariant of kwargs.
"""
def __init__(self, fn):
self._fn = fn
self._cache = MiniCache()
def __call__(self, *args, **kwargs):
#key = args[1] # speed-up for 1/4
key = args # equal to 1. for short tuple
#key = (args, tuple(kwargs.keys()))
try:
#print "cached::::::", key
return self._cache.get(key)
except KeyError:
result = self._fn(*args, **kwargs)
#print "written:::::::", key
self._cache.set(key, result)
return result
def EternalInvariant(function):
clos = EternalInvariantHelper(function)
def wrapper(*args, **kwargs):
return clos(*args, **kwargs)
return wrapper | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/minicache.py | minicache.py |
import sys
import os
import inspect
#import re
#import traceback
from agatsuma import log
from agatsuma.interfaces import AbstractSpell, IInternalSpell
def alist_to_strlist(alist):
return map(lambda atom: str(atom), alist)
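# Enumerator discovers spell (plugin) modules on disk, imports them,
# resolves inter-spell dependencies and registers the resulting spells
# in the core's spellbook in dependency order.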
class Enumerator(object):
def __init__(self, core, app_directories, forbidden_spells):
self.app_directories = app_directories
self.forbidden_spells = forbidden_spells
self.core = core
#def appBaseName(self):
# return self.__module__.split('.')[0]
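    # Registration helpers: newly enumerated spells go straight into the
    # core's spellbook (the older list/dict storage survives only as comments).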
def __register_spell(self, spell):
self.core.spellbook.register(spell)
#self.core.spells.append(spell)
#self.core.spellbook[spell.spell_id()] = spell
def __unregister_spell(self, spell):
self.core.spellbook.eliminate(spell)
#self.core.spells.remove(spell)
#del self.core.spellbook[spell.spell_id()]
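    # Main entry point: collect candidate spell modules, import them, build
    # "provides" groups, drop spells with unresolvable dependencies and
    # finally register the survivors in dependency order.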
def enumerate_spells(self, essentialSpellSpaces, additionalSpellPaths):
spell_directories = []
spell_directories.extend(additionalSpellPaths)
if self.app_directories:
spell_directories.extend(self.app_directories)
if not self.core.app_name:
log.core.warning("Application name not provided, so trying to guess one using %s..." % str(self.app_directories))
if type(self.app_directories[0]) == str:
self.core.app_name = self.app_directories[0][0].capitalize() + self.app_directories[0][1:]
else:
self.core.app_name = self.app_directories[0][1][0].capitalize() + self.app_directories[0][1][1:]
log.core.info('Guessed name: %s' % self.core.app_name)
else:
log.core.critical("No main spellpaths to process provided")
if self.core.internal_state.get('mode') == 'setup':
log.core.info("Setup mode detected, so replacing all the spellpaths with Agatsuma itself...")
spell_directories = [(os.path.join(self.core.agatsuma_base_dir, 'agatsuma'), 'agatsuma')]
log.core.debug("Spellpaths to process:")
for p in spell_directories:
log.core.debug("* %s" % str(p))
log.core.debug("System paths:")
for p in sys.path:
log.core.debug("* %s" % p)
log.core.info("Collecting names of possible spells...")
namespacesToImport = []
namespacesToImport.extend(essentialSpellSpaces)
for spellsDir in spell_directories:
#spellsDir = #os.path.realpath(os.path.join(self.OPT.appPath, 'controllers'))
#sys.path.append(spellsDir)
            # Each spellpath may be a plain directory path or a (directory, namespace)
            # tuple; derive the base import namespace accordingly.
            if not isinstance(spellsDir, tuple):
                basicNamespace = spellsDir.replace(os.path.sep, '.') #os.path.basename(spellsDir)
            else:
                basicNamespace = spellsDir[1]
                spellsDir = spellsDir[0]
log.core.info("Processing spells directory: %s" % spellsDir)
log.core.info("Spells namespace: %s" % basicNamespace)
for root, dirs, files in os.walk(spellsDir):
                def useFilePred(file_name):
                    if file_name in self.forbidden_spells:
                        log.core.warning('File ignored due to app settings: %s' % file_name)
                        return False
                    return True
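                # Turn every discovered .py file into an importable dotted module
                # name: strip the extension, make the path relative to the spells
                # directory and prefix it with the base namespace.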
fileList = filter(lambda x: x.endswith('.py') and not x.startswith('__'), files)
fileList = filter(useFilePred, fileList)
fileList = map(lambda x: os.path.join(root, x), fileList)
fileList = map(lambda x: os.path.splitext(x)[0], fileList)
fileList = map(lambda x: x.replace(spellsDir + os.path.sep, ''), fileList)
fileList = map(lambda x: x.replace(os.path.sep, '.'), fileList)
fileList = map(lambda x: "%s.%s" % (basicNamespace, x), fileList)
namespacesToImport.extend(fileList)
#idRe = re.compile('^[\w]+$')
spells = {}
provides = {}
namespacesToImport = list(set(namespacesToImport))
log.core.debug('Collected namespaces: %s' % str(namespacesToImport))
log.core.info('Started spells enumerator...')
for nsToImport in namespacesToImport:
if not nsToImport in self.forbidden_spells:
#log.core.info('trying %s...' % nsToImport)
mod = None
try:
mod = __import__(nsToImport, {}, {}, '*', -1)
except Exception, e:
log.core.warning('Exception while importing %s: %s' % (nsToImport, str(e)))
#traceback.print_exc()
mod = None
possibleSpells = []
plPredicate = lambda x: type(x) == type and issubclass(x, AbstractSpell) and x != AbstractSpell
possibleSpells = inspect.getmembers(mod, plPredicate)
if possibleSpells:
possibleSpells = map(lambda x: x[1], possibleSpells)
for possibleSpell in possibleSpells:
instance = possibleSpell()
plid = instance.spell_id()
#if not idRe.match(plid):
# raise Exception("Incorrect spell Id: %s" % plid)
log.core.info("Spell found: %s; base=%s" % (plid, nsToImport))
if not spells.has_key(plid):
nsName = mod.__name__
ns = mod #__import__(nsName, stateVars, {}, '*', -1)
instance._set_details(
namespace = ns,
namespace_name = nsName,
file_name = ns.__file__.replace(spellsDir + os.path.sep, '')
)
spells[plid] = instance
prov = instance.provides()
if prov:
for provId in prov:
if not provId in provides:
provides[provId] = []
provides[provId].append(plid)
#log.core.info("Successfully imported: %s; %s; %s" % (ns, nsName, nsToImport))
else:
log.core.critical("POSSIBLE CONFLICT: Spell with id '%s' already imported!" % plid)
else:
log.core.info('Not a spellspace: %s' % nsToImport)
else:
                log.core.warning('Namespace ignored due to app settings: %s' % nsToImport)
falseSpells = []
for provId in provides:
deps = provides[provId]
log.core.debug("Functionality '%s' provided by %s" % (provId, alist_to_strlist(deps)))
newId = "[%s]" % provId
falseSpell = AbstractSpell(newId, {'info' : 'Dependencies helper for %s' % provId,
'deps' : tuple(deps),
})
spells[newId] = falseSpell
falseSpells.append(falseSpell)
spellsList = spells.values()
internalSpells = filter(lambda spell: issubclass(type(spell), IInternalSpell), spellsList)
log.core.info("IMPORT STAGE COMPLETED. Imported %d spells (%d provided by Agatsuma, %d fake spells for groups):"
% (len(spells), len(internalSpells), len(falseSpells)))
self.print_spells_list(spellsList)
log.core.info('RESOLVING DEPENDENCIES...')
needCheck = True
while needCheck:
needCheck = False
for id in spells.keys():
deps = spells[id].deps()
if deps:
for dep in deps:
if not dep in spells:
                            log.core.warning('[WARNING] Disconnected: "%s"; non-existent dependency: "%s"' % (id, dep))
for falseSpell in falseSpells:
falseSpell._remove_dep(id)
del spells[id]
needCheck = True
break
log.core.info('Arranging spells...')
resolved = []
needIteration = True
while needIteration:
needIteration = False
for id in spells.keys():
deps = spells[id].deps()
ok = True
if deps:
for dep in deps:
ok = ok and dep in resolved
if ok:
#if not deps:
# log.core.info('No dependencies for "%s"; adding as %d' % (id, len(self.spells)))
#else:
# log.core.info('Already resolved dependencies for "%s"; adding as %d' % (id, len(self.spells)))
self.__register_spell(spells[id])
resolved.append(id)
del spells[id]
needIteration = True
break
#log.core.info('new iteration, already resolved: %s' % resolved)
cyclicDeps = sorted(spells.values(), lambda a, b: cmp(len(a.deps()), len(b.deps())))
for spell in cyclicDeps:
            log.core.warning('[WARNING] Adding loop-dependent spell "%s" (deps: %s)' % (spell.spell_id(), str(spell.deps())))
self.__register_spell(spell)
spellsNames = self.core.spellbook.all_names() # map(lambda p: p.spell_id(), self.core.spells)
log.core.debug("Connected %d spells: %s. False spells will be removed now" % (len(spellsNames), str(spellsNames)))
for spell in falseSpells:
self.__unregister_spell(spell)
spellsNames = self.core.spellbook.all_names() # map(lambda p: p.spell_id(), self.core.spells)
log.core.info("RESOLVING STAGE COMPLETED. Connected %d spells: %s" % (len(spellsNames), str(spellsNames)))
log.core.info('SPELLS ENUMERATING COMPLETED')
def eagerUnload(self):
log.core.debug("Performing eager unload...")
toUnload = filter(lambda spell: spell.config.get('eager_unload', None),
self.core.spellbook.to_list())
for spell in toUnload:
log.core.debug('Eager unloading "%s"' % spell.spell_id())
self.__unregister_spell(spell)
def print_spells_list(self, spells):
for spell in spells:
log.core.info("* %s, %s, %s" % (spell.spell_id(), spell.namespace_name(), spell.file_name())) | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/enumerator.py | enumerator.py |
class AbstractSpell(object):
""" Base class for all the spells. It have some important methods
and callbacks. When Core traversing directories it looks for
implementations of this interface and threats them as spells.
:param spell_id: unique identifier (name) of this spell.
Should match the ``\w+`` regex.
:param spellConfig: dict with optional spell parameters.
The following spell parameters are supported now:
#. `info` : any string containing readable description for
this spell
#. `deps` : tuple of strings which are identifiers of spells
required for this spell to work (*dependencies*)
#. `provides` : tuple of strings that describes *functionality*
provided by this spell (*webPageRender* or *DatabaseDriver*
for example)
#. `requires` : tuple of strings that describes which
functionality required for this spell to work.
        #. `eager_unload` : boolean parameter. When it's set to ``True``
           the core unregisters the spell right after completing the
           :meth:`agatsuma.interfaces.AbstractSpell.post_configure` calls.
           This is useful for spells that are only required to perform some
           application initialization, such as settings registration
           and data preparation. It may also be suitable for
           :ref:`dependencies helpers<dependencies-helpers>`.
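
    A minimal subclass sketch (the spell id and config values below are
    illustrative only)::

        from agatsuma.interfaces import AbstractSpell

        class MySpell(AbstractSpell):
            def __init__(self):
                config = {'info' : 'My spell',
                          'deps' : ('some_other_spell', ),
                          'provides' : ('some_functionality', ),
                          }
                AbstractSpell.__init__(self, 'my_spell', config)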
"""
def __init__(self, spell_id, spellConfig = {}):
self.__pId = spell_id
self.config = spellConfig
# spell config
if spellConfig:
self.__pName = spellConfig.get('info', None)
self.__pdeps = list(spellConfig.get('deps', () ))
self.__pProvides = spellConfig.get('provides', () )
for requirement in spellConfig.get('requires', () ):
self.__pdeps.append('[%s]' % requirement)
self.__pdeps = tuple(self.__pdeps)
# internal variables, init in app_globals.py
self._set_details(None, '', '')
def _set_details(self, namespace, namespace_name, file_name):
""" *For internal usage only* """
self.pnamespace = namespace
self.pnamespace_name = namespace_name
self.pfile_name = file_name
def _remove_dep(self, dep):
""" Removes dependency ID from dependency tuple. *For internal usage only* """
if dep in self.__pdeps:
deps = list(self.__pdeps)
deps.remove(dep)
self.__pdeps = tuple(deps)
def spell_id(self):
""" Returns name of this spell
(see `spellConfig` constructor parameter) """
return self.__pId
def deps(self):
""" Returns tuple consists of names of spells required for this spell
to work (see `spellConfig` constructor parameter).
"""
return self.__pdeps
def provides(self):
""" Returns tuple consists of names of functionality that spell provides
(see `spellConfig` constructor parameter).
"""
return self.__pProvides
def file_name(self):
""" Returns file name for file containing this spell """
return self.pfile_name
def namespace_name(self):
""" Returns namespace name (eg. myapp.foo.bar) for namespace
containing this spell.
"""
return self.pnamespace_name
def namespace(self):
""" Returns namespace (not the namespace name but namespace itself!)
containing this spell.
"""
return self.pnamespace
def pre_configure(self, core):
""" Core calls this method before settings settings service
initialization. All the options that are needed for spell
to work should be registered in this method using core method
:meth:`agatsuma.core.Core.register_option`
*Should be overriden in subclasses*
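
            A minimal sketch (the option name below is illustrative)::

                def pre_configure(self, core):
                    core.register_option("!myspell.storage_uri", unicode,
                                         "Example option description")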
"""
pass
def post_configure(self, core):
""" Core calls this method subsequent to settings initialization.
This method intended to preconfigure application related to loaded
settings (run some threads or open database connections for example)
*Should be overriden in subclasses*
"""
pass
def post_config_update(self, **kwargs):
""" Settings service calls this method when any writable setting is
updated. This method may be used to send updated data to worker
processes for example.
            *Should be overridden in subclasses*
"""
pass | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/interfaces/abstract_spell.py | abstract_spell.py |
class ISetupSpell(object):
"""
"""
def requirements(self):
"""This method should return dict with string keys
and lists of strings as values.
Dict values describes describe spell dependencies
setuptools format (like ``['libfoo>=0.1.2', 'libbar=1.2']``)
Dict keys are responsible to dependencies groups that
may be used to install only the required dependencies.
When you call *setup.py* all the requirements from
all the available spells will be added to dependencies list.
So if spell can't be imported due to non-existent dependencies
(``import libfoo`` for non-existent ``libfoo`` in spell's file)
it will be not loaded and it's dependencies will be not
added to dependencies list.
It may be good idea to place spell with requirements into
separate file without dangerous imports.
But here is a big problem: if module containing spell
does imports that may not be installed on target system
when core will fail at importing stage and will not
load spell. You have at least 3 slightly different solutions for this
problem.
.. _dependencies-helpers:
Assume you've written file ``foo.py`` with the following
content::
import SomeBigLibrary
from agatsuma.interfaces import AbstractSpell
class MySpell(AbstractSpell):
def __init__(self):
config = {'info' : 'My Spell',
}
AbstractSpell.__init__(self, 'my_spell', config)
def requirements(self):
                    return {'importantlibs' : ['SomeBigLibrary>=1.2.3'],
}
def something(self):
SomeBigLibrary.makeAllGood()
        **Solution 1**: split ``foo.py`` into two files (``foo.py`` and
``foo_deps.py`` for example):
``foo.py``::
import SomeBigLibrary
from agatsuma.interfaces import AbstractSpell
class MySpell(AbstractSpell):
def __init__(self):
config = {'info' : 'My Spell',
}
AbstractSpell.__init__(self, 'my_spell', config)
def something(self):
SomeBigLibrary.makeAllGood()
``foo_deps.py``::
from agatsuma.interfaces import AbstractSpell
class MySpellDependenciesHelper(AbstractSpell):
def __init__(self):
config = {'info' : 'My Spell dependencies helper',
'eager_unload' : True,
}
AbstractSpell.__init__(self, 'my_spell_dephelper', config)
def requirements(self):
return {'importantlibs' : ['SomeBigLibrary>=1.2.3'],
}
        **Solution 2**: avoid using global imports:
``foo.py``::
from agatsuma.interfaces import AbstractSpell
class MySpell(AbstractSpell):
def __init__(self):
config = {'info' : 'My Spell',
}
AbstractSpell.__init__(self, 'my_spell', config)
def requirements(self):
return {'importantlibs' : ['SomeBigLibrary>=1.2.3'],
}
def something(self):
import SomeBigLibrary
SomeBigLibrary.makeAllGood()
        **Solution 3**: use the :attr:`agatsuma.core.Core.internal_state`.
``foo.py``::
from agatsuma.core import Core
if Core.internal_state.get('mode', None) == 'normal':
import SomeBigLibrary
from agatsuma.interfaces import AbstractSpell
class MySpell(AbstractSpell):
def __init__(self):
config = {'info' : 'My Spell',
}
AbstractSpell.__init__(self, 'my_spell', config)
def requirements(self):
return {'importantlibs' : ['SomeBigLibrary>=1.2.3'],
}
def something(self):
SomeBigLibrary.makeAllGood()
"""
return {}
def py_entry_points(self):
"""
{'section' : [('name', 'namespace', 'entrypoint'), ],
}
"""
return {} | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/interfaces/i_setup_spell.py | i_setup_spell.py |
from abc import ABCMeta as _ABCMeta
from abc import abstractmethod
class EAbstractFunctionCall(Exception):
def __init__(self, fn, *args, **kwargs):
self.__path = "%s" % (fn.__module__, )
self.__name = fn.__name__
self.__args = args
self.__kwargs = kwargs
Exception.__init__(self, self.__repr__())
def __repr__(self):
return "Call to abstract method '%s(%s, %s)' at '%s'" % (self.__name,
str(self.__args),
str(self.__kwargs),
self.__path)
def durable_abstractmethod(f):
#def dummy(*args, **kwargs):
# return f(*args, **kwargs)
#print dir(f), f, str(f), repr(f), type(f), type(f).__name__
#dummy = lambda *args, **kwargs : f(*args, **kwargs)
def dummy(*args, **kwargs):
raise EAbstractFunctionCall(f, *args, **kwargs)
dummy.__doc__ = f.__doc__
if hasattr(f, "__name__"):
dummy.__name__ = f.__name__
else:
dummy.__name__ = type(f).__name__
dummy.__decorated__ = f
return abstractmethod(dummy)
class ABCMeta(_ABCMeta):
_verbose = False
def __call__(cls, *args, **kwargs):
try:
return _ABCMeta.__call__(cls, *args, **kwargs)
except TypeError, e:
if hasattr(cls, "__abstractmethods__"):
abstracts = map(lambda name: getattr(cls, name), cls.__abstractmethods__)
cls._raise_error(cls.__name__, "instantiate", abstracts)
else:
raise e
@classmethod
def _raise_error(cls, name, description, abstracts):
separator = ": "
if cls._verbose:
separator = "\n\n"
text = "Can't %s class '%s' with %d abstract methods%s" % (
description,
name,
len(abstracts),
separator
)
text += cls._format_problems(abstracts)
raise TypeError(text)
@classmethod
def _format_problems(cls, abstracts):
        #print cls, cls._verbose # leftover debug output, disabled
descriptions = map(lambda m: cls.__format_problem(m), abstracts)
if cls._verbose:
return '\n\n'.join(descriptions)
return ', '.join(descriptions)
@classmethod
def __format_problem(cls, method):
methodname = method.__name__
if hasattr(method, "__decorated__"):
methodname = "Decorated method '%s'" % method.__name__
if cls._verbose:
docstring = method.__doc__
if not docstring:
docstring = "Undocumented"
lines = docstring.splitlines()
lines = map(lambda s: " | %s" % s, lines)
docstring = '\n'.join(lines)
maxlen = 100
if len(docstring) > maxlen:
docstring = "%s..." % docstring[:maxlen]
return "* %s:\n%s" % (methodname, docstring)
else:
return methodname
class ABCStrictMeta(ABCMeta):
def __new__(cls, name, bases, ns, *args, **kwargs):
problems = []
for base in bases:
if hasattr(base, "__abstractmethods__"):
                for abstract in base.__abstractmethods__:
                    if abstract not in ns:
                        problems.append(getattr(base, abstract))
if problems:
cls._raise_error(name, "define", problems)
return _ABCMeta.__new__(cls, name, bases, ns, *args, **kwargs)
class ABCMetaVerbose(ABCMeta):
_verbose = True
class ABCStrictMetaVerbose(ABCStrictMeta):
_verbose = True
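# Usage sketch (illustrative): a durable abstract method keeps its docstring
# and produces a descriptive TypeError when an incomplete class is instantiated.
#
#   class Base(object):
#       __metaclass__ = ABCMetaVerbose
#
#       @durable_abstractmethod
#       def frobnicate(self):
#           """Do the frobnication."""
#
#   Base()  # raises TypeError listing the abstract method 'frobnicate'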
if __name__ == "__main__":
import doctest
doctest.testmod() | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/adaptations/abclasses.py | abclasses.py |
from agatsuma import Implementations
from agatsuma.interfaces import ISetupSpell, IInternalSpell
def run_setuptools(**kwargs):
from setuptools import setup
from agatsuma.third_party.distribute_setup import use_setuptools
use_setuptools()
setup(**kwargs)
######################################################################
## Entry points
def collectEntryPoints(spells_filter):
spells = Implementations(ISetupSpell)
spells = filter(spells_filter, spells)
sections = {}
for spell in spells:
pointsdict = spell.py_entry_points()
for section in pointsdict:
if not sections.get(section, None):
sections[section] = []
points = pointsdict[section]
sections[section].extend(points)
return sections
def formatEntryPoints(epoints):
out = ""
for section, points in epoints.items():
out += "[%s]\n" % section
for point in points:
out += "%s = %s:%s\n" % (point[0], point[1], point[2])
return out
def entry_pointsInfo(spells_filter):
entry_pointsDict = collectEntryPoints(spells_filter)
return formatEntryPoints(entry_pointsDict)
######################################################################
## Dependencies
def __withoutIInternalSpells(spell):
return not issubclass(type(spell), IInternalSpell)
def depinfo(groupChecker, spells_filter):
spells = Implementations(ISetupSpell)
spells = filter(spells_filter, spells)
depGroups = []
dependencies = []
depGroupsContent = {}
for spell in spells:
depdict = spell.requirements()
for group in depdict:
depGroups.append(group)
if not depGroupsContent.get(group, None):
depGroupsContent[group] = []
deps = depdict[group]
depGroupsContent[group].extend(deps)
if groupChecker(group):
dependencies.extend(deps)
dependencies = list(set(dependencies))
return dependencies, depGroups, depGroupsContent
######################################################################
## Debug printouts
def out(s):
#log.setup.info
print s
def nl():
out("="*60)
def printDeps(dependencies, depGroups, depGroupsContent, depGroupEnabled):
out("The following dependencies classes are present:")
out("(Use --disable-all to disable all the dependencies)")
for group in depGroups:
formatString = "[ ] %s: %s "
if depGroupEnabled(group):
formatString = "[*] %s: %s"
out(formatString % (group, str(depGroupsContent[group])))
out(" Use --without-%s to disable" % group)
out(" Use --with-%s to enable" % group)
nl()
out("The following dependencies list will be used:\n%s" % str(dependencies))
out("NOTE: You can use AGATSUMA_CONF environment variable to pass options")
out("NOTE: Dependencies may not work under easy_setup. Use pip!")
######################################################################
## Useful routines
def filter_arguments(args):
args = filter(lambda s: not s.startswith('--with'), args)
args = filter(lambda s: s != "--disable-all", args)
return args
def groups_predicate(args):
components = filter(lambda s: s.startswith('--with'), args)
depsDisabled = "--disable-all" in args
def depGroupEnabled(group):
        depEnabled = (not (depsDisabled or ('--without-%s' % group) in components)
                      or (depsDisabled and ('--with-%s' % group) in components))
return depEnabled
return depGroupEnabled
def get_dependencies(depGroupsFilter, spells_filter = __withoutIInternalSpells):
dependencies, depGroups, depGroupsContent = depinfo(depGroupsFilter,
spells_filter)
printDeps(dependencies, depGroups, depGroupsContent, depGroupsFilter)
return dependencies
def get_entry_points(spells_filter = __withoutIInternalSpells):
entry_points = entry_pointsInfo(spells_filter)
nl()
out("The following entry points are provided: %s" % entry_points)
nl()
return entry_points | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/adaptations/distribute.py | distribute.py |
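# Typical use from a project's setup.py (sketch; the import path and the
# project name are illustrative):
#
#   import sys
#   from agatsuma.adaptations.distribute import (run_setuptools, filter_arguments,
#                                                groups_predicate, get_dependencies,
#                                                get_entry_points)
#
#   argv = sys.argv
#   run_setuptools(name = "myapp",
#                  install_requires = get_dependencies(groups_predicate(argv)),
#                  entry_points = get_entry_points(),
#                  script_args = filter_arguments(argv))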
import re
import copy
from agatsuma import log
from agatsuma import Settings
from agatsuma import SpellByStr
from agatsuma.interfaces import AbstractSpell, IInternalSpell
from agatsuma.commons.types import Atom
class SettingsSpell(AbstractSpell, IInternalSpell):
def __init__(self):
config = {'info' : 'Agatsuma Settings Spell',
'deps' : (),
'requires' : (Atom.settings_backend, ),
}
AbstractSpell.__init__(self, Atom.agatsuma_settings, config)
def pre_configure(self, core):
log.new_logger("settings")
core.register_option("!core.settings_storage_uri", unicode, "Settings storage URI")
core.register_option("!core.recovery", bool, "Recovery mode")
def post_configure(self, core):
Settings.save = self.save
log.core.debug('Settings.save method overriden')
storageUri = Settings.core.settings_storage_uri
recovery = Settings.core.recovery
self.backend = None
if not recovery:
log.settings.info("Initializing Settings Storage..")
rex = re.compile(r"^(\w+)\+(.*)$")
match = rex.match(storageUri)
if match:
backendId = match.group(1)
uri = match.group(2)
spellName = "agatsuma_settings_backend_%s" % backendId
spell = SpellByStr(spellName)
if spell:
self.backend = spell.instantiate_backend(uri)
else:
raise Exception("Settings backend improperly configured: spell '%s' not found" % spellName)
else:
raise Exception("Incorrect settings storage URI")
else:
log.settings.warning("Running in recovery mode, settings in storage are ignored")
if self.backend:
log.settings.info("Updating writable settings from storage '%s'..." % self.backend.__class__.__name__)
updated = 0
for groupName in Settings.settings:
group = Settings.settings[groupName]
newGroup = copy.deepcopy(group)
updatedInGroup = 0
for setting in group:
if not setting in Settings.readonly_settings[groupName]:
curVal = group[setting]
newVal = self.backend.get("%s.%s" % (groupName, setting), curVal)
if newVal != curVal:
newGroup[setting] = newVal
updated += 1
updatedInGroup += 1
if updatedInGroup:
Settings.settings[groupName] = newGroup
if updated:
Settings.set_config_data(Settings.settings)
log.settings.info("Settings updated from storage: %d" % updated)
def save(self):
log.settings.info("Writing settings into storage '%s'..." % self.backend.__class__.__name__)
written = 0
for groupName in Settings.settings:
group = Settings.settings[groupName]
for setting in group:
if not setting in Settings.readonly_settings[groupName]:
self.backend.save("%s.%s" % (groupName, setting), group[setting])
written += 1
log.settings.info("Settings written into storage: %d" % written) | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/spells/common/core_settings.py | core_settings.py |
import copy
from agatsuma.core import Core
if Core.internal_state.get("mode", None) == "normal":
import sqlalchemy as sa
import sqlalchemy.orm as orm
else:
sa = None
from agatsuma import log
from agatsuma import Settings
from agatsuma import Implementations
from agatsuma.interfaces import AbstractSpell, IInternalSpell
from agatsuma.interfaces import IStorageSpell, IModelSpell, ISetupSpell
from agatsuma.commons.types import Atom
class SQLASpell(AbstractSpell, IInternalSpell, IStorageSpell, ISetupSpell):
""".. _sqla-driver:
"""
def __init__(self):
config = {'info' : 'Agatsuma SQLAlchemy Spell',
'deps' : (Atom.agatsuma_core, ),
'provides' : (Atom.storage_driver, ),
}
AbstractSpell.__init__(self, Atom.agatsuma_sqla, config)
if sa:
SQLASpell.proto_metadata = sa.MetaData()
def requirements(self):
return {"sqla" : ["sqlalchemy>=0.6.1"],
}
def deploy(self, *args, **kwargs):
spells = Implementations(IModelSpell)
log.storage.info("Initializing Database...")
if spells:
if "recreate" in args:
log.storage.info("Recreating schema...")
self.meta.drop_all()
self.meta.create_all()
for spell in spells:
spell.perform_deployment(Core.instance)
log.storage.info("Deployment completed")
else:
log.storage.info("Model spells not found")
def pre_configure(self, core):
core.register_option("!sqla.uri", unicode, "SQLAlchemy engine URI")
core.register_option("!sqla.parameters", dict, "kwargs for create_engine")
core.register_entry_point("agatsuma:sqla_init", self.deploy)
def post_configure(self, core):
spells = Implementations(IModelSpell)
if spells:
log.storage.info("Initializing SQLAlchemy engine and session...")
self.SqlaEngine = sa.create_engine(Settings.sqla.uri, **Settings.sqla.parameters)
SessionFactory = orm.sessionmaker()
self.Session = orm.scoped_session(SessionFactory)
self.Session.configure(bind=self.SqlaEngine)
log.storage.info("Initializing SQLAlchemy data model..")
for spell in spells:
spell.init_metadata(SQLASpell.proto_metadata)
SQLASpell.meta = SQLASpell.metadata_copy()
SQLASpell.meta.bind = self.SqlaEngine
log.storage.info("Setting up ORM...")
for spell in spells:
spell.setup_orm(core)
log.storage.info("Model initialized")
self.sqla_default_session = self.makeSession()
for spell in spells:
spell.post_orm_setup(core)
else:
log.storage.info("Model spells not found")
def makeSession(self):
"""
Instantiates new session using ScopedSession helper
"""
return self.Session()
@staticmethod
def metadata_copy():
meta = copy.deepcopy(SQLASpell.proto_metadata)
        # workaround: give the copied metadata a fresh ddl_listeners defaultdict
meta.ddl_listeners = sa.util.defaultdict(list)
return meta | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/spells/common/storage_drivers/core_sqla.py | core_sqla.py |
import re
try:
import cPickle as pickle
except ImportError:
import pickle
from agatsuma import log
from agatsuma import Spell
from agatsuma.interfaces import (AbstractSpell,
IInternalSpell,
ISettingsBackendSpell,
AbstractSettingsBackend)
from agatsuma.commons.types import Atom
class MemcachedAbstractSettingsBackend(AbstractSettingsBackend):
def __init__(self, uri):
AbstractSettingsBackend.__init__(self)
self.uri = uri
self.init_connection()
def init_connection(self):
log.settings.info("Initializing Memcached settings backend "\
"using URI '%s'" % self.uri)
self.keyprefix = self._parse_memcached_prefix_uri(self.uri)
memcachedSpell = Spell(Atom.agatsuma_memcached)
self.pool = memcachedSpell.get_connection_pool()
@property
def connection(self):
with self.pool.reserve() as mc:
return mc
def _getPrefixedKey(self, sessionId):
if self.keyprefix:
return str("%s_%s" % (self.keyprefix, sessionId))
return sessionId
@staticmethod
def _parse_memcached_prefix_uri(details):
# memprefix://prefixname
match = re.match('^memprefix://(\w+)$', details)
return match.group(1) if match else ''
def get(self, name, currentValue):
data = self.connection.get(self._getPrefixedKey(name))
if data:
return pickle.loads(data)
return currentValue
def save(self, name, value):
if not self.connection.set(self._getPrefixedKey(name),
pickle.dumps(value)):
log.settings.critical("Saving setting '%s' failed" % name)
class MemcachedSettingsSpell(AbstractSpell, IInternalSpell, ISettingsBackendSpell):
def __init__(self):
config = {'info' : 'Memcached settings storage',
'deps' : (Atom.agatsuma_memcached, ),
'provides' : (Atom.settings_backend, )
}
AbstractSpell.__init__(self, Atom.agatsuma_settings_backend_memcached,
config)
def instantiate_backend(self, uri):
self.managerInstance = MemcachedAbstractSettingsBackend(uri)
return self.managerInstance | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/spells/common/settings_backends/memcached_backend.py | memcached_backend.py |
import re
import pymongo
from agatsuma import log
from agatsuma import Spell
from agatsuma.interfaces import AbstractSpell, IInternalSpell
from agatsuma.interfaces import ISettingsBackendSpell, AbstractSettingsBackend
from agatsuma.commons.types import Atom
class MongoAbstractSettingsBackend(AbstractSettingsBackend):
def __init__(self, uri):
self.uri = uri
self.init_connection()
def init_connection(self):
log.settings.info("Initializing MongoDB settings backend using URI '%s'" % self.uri)
connData = MongoAbstractSettingsBackend._parse_mongo_table_uri(self.uri)
mongoSpell = Spell(Atom.agatsuma_mongodb)
self.connection = mongoSpell.connection
self.dbCollection = getattr(mongoSpell, connData[0])
self.db = getattr(self.dbCollection, connData[1])
@staticmethod
def _parse_mongo_table_uri(details):
# mongotable://collection/table
match = re.match('^mongotable://(\w+)/(\w+)$', details)
return match.group(1), match.group(2)
def get(self, name, currentValue):
try:
data = self.db.find_one({'name': name})
self.connection.end_request()
if data:
return data["value"]
except pymongo.errors.AutoReconnect:
log.settings.critical("Mongo exception during loading %s" % name)
except Exception, e:
log.settings.critical("Unknown exception during loading: %s" % str(e))
self.connection.end_request()
return currentValue
def save(self, name, value):
try:
self.db.update(
{'name': name}, # equality criteria
{'name' : name,
'value': value,
}, # new document
upsert=True)
self.connection.end_request()
except pymongo.errors.AutoReconnect:
log.settings.critical("Mongo exception during saving %s=%s" % (name, str(value)))
class MongoSettingsSpell(AbstractSpell, IInternalSpell, ISettingsBackendSpell):
def __init__(self):
config = {'info' : 'MongoDB settings storage',
'deps' : (Atom.agatsuma_mongodb, ),
'provides' : (Atom.settings_backend, )
}
AbstractSpell.__init__(self, Atom.agatsuma_settings_backend_mongo, config)
def instantiate_backend(self, uri):
self.managerInstance = MongoAbstractSettingsBackend(uri)
return self.managerInstance
def pre_configure(self, core):
core.register_entry_point("mongodb:settings:cleanup", self.entry_point)
def entry_point(self, *args, **kwargs):
log.settings.info("Cleaning up settings in MongoDB")
self.managerInstance.cleanup() | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/spells/common/settings_backends/mongo_backend.py | mongo_backend.py |
import logging.handlers
import re
import sys
import types
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
def valid_ident(s):
m = IDENTIFIER.match(s)
if not m:
raise ValueError('Not a valid Python identifier: %r' % s)
return True
#
# This function is defined in logging only in recent versions of Python
#
try:
from logging import _checkLevel
except ImportError:
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in logging._levelNames:
raise ValueError('Unknown level: %r' % level)
rv = logging._levelNames[level]
else:
raise TypeError('Level not an integer or a '
'valid string: %r' % level)
return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
"""A converting dictionary wrapper."""
def __getitem__(self, key):
value = dict.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def get(self, key, default=None):
value = dict.get(self, key, default)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, key, default=None):
value = dict.pop(self, key, default)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class ConvertingList(list):
"""A converting list wrapper."""
def __getitem__(self, key):
value = list.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, idx=-1):
value = list.pop(self, idx)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
return result
class ConvertingTuple(tuple):
"""A converting tuple wrapper."""
def __getitem__(self, key):
value = tuple.__getitem__(self, key)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class BaseConfigurator(object):
"""
The configurator base class which defines some useful defaults.
"""
CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
DIGIT_PATTERN = re.compile(r'^\d+$')
value_converters = {
'ext' : 'ext_convert',
'cfg' : 'cfg_convert',
}
# We might want to use a different one, e.g. importlib
importer = __import__
def __init__(self, config):
self.config = ConvertingDict(config)
self.config.configurator = self
def resolve(self, s):
"""
Resolve strings to objects using standard import and attribute
syntax.
"""
name = s.split('.')
used = name.pop(0)
try:
found = self.importer(used)
for frag in name:
used += '.' + frag
try:
found = getattr(found, frag)
except AttributeError:
self.importer(used)
found = getattr(found, frag)
return found
except ImportError:
e, tb = sys.exc_info()[1:]
v = ValueError('Cannot resolve %r: %s' % (s, e))
v.__cause__, v.__traceback__ = e, tb
raise v
def ext_convert(self, value):
"""Default converter for the ext:// protocol."""
return self.resolve(value)
def cfg_convert(self, value):
"""Default converter for the cfg:// protocol."""
rest = value
m = self.WORD_PATTERN.match(rest)
if m is None:
raise ValueError("Unable to convert %r" % value)
else:
rest = rest[m.end():]
d = self.config[m.groups()[0]]
#print d, rest
while rest:
m = self.DOT_PATTERN.match(rest)
if m:
d = d[m.groups()[0]]
else:
m = self.INDEX_PATTERN.match(rest)
if m:
idx = m.groups()[0]
if not self.DIGIT_PATTERN.match(idx):
d = d[idx]
else:
try:
n = int(idx) # try as number first (most likely)
d = d[n]
except TypeError:
d = d[idx]
if m:
rest = rest[m.end():]
else:
raise ValueError('Unable to convert '
'%r at %r' % (value, rest))
#rest should be empty
return d
def convert(self, value):
"""
Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do.
"""
if not isinstance(value, ConvertingDict) and isinstance(value, dict):
value = ConvertingDict(value)
value.configurator = self
elif not isinstance(value, ConvertingList) and isinstance(value, list):
value = ConvertingList(value)
value.configurator = self
elif not isinstance(value, ConvertingTuple) and\
isinstance(value, tuple):
value = ConvertingTuple(value)
value.configurator = self
elif isinstance(value, basestring): # str for py3k
m = self.CONVERT_PATTERN.match(value)
if m:
d = m.groupdict()
prefix = d['prefix']
converter = self.value_converters.get(prefix, None)
if converter:
suffix = d['suffix']
converter = getattr(self, converter)
value = converter(suffix)
return value
def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
result = c(**kwargs)
if props:
for name, value in props.items():
setattr(result, name, value)
return result
def as_tuple(self, value):
"""Utility function which converts lists to tuples."""
if isinstance(value, list):
value = tuple(value)
return value
class DictConfigurator(BaseConfigurator):
"""
Configure logging using a dictionary-like object to describe the
configuration.
"""
def configure(self):
"""Do the configuration."""
config = self.config
if 'version' not in config:
raise ValueError("dictionary doesn't specify a version")
if config['version'] != 1:
raise ValueError("Unsupported version: %s" % config['version'])
incremental = config.pop('incremental', False)
EMPTY_DICT = {}
logging._acquireLock()
try:
if incremental:
handlers = config.get('handlers', EMPTY_DICT)
# incremental handler config only if handler name
# ties in to logging._handlers (Python 2.7)
if sys.version_info[:2] == (2, 7):
for name in handlers:
if name not in logging._handlers:
raise ValueError('No handler found with '
'name %r' % name)
else:
try:
handler = logging._handlers[name]
handler_config = handlers[name]
level = handler_config.get('level', None)
if level:
handler.setLevel(_checkLevel(level))
except StandardError, e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
try:
self.configure_logger(name, loggers[name], True)
except StandardError, e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
root = config.get('root', None)
if root:
try:
self.configure_root(root, True)
except StandardError, e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
else:
disable_existing = config.pop('disable_existing_loggers', True)
logging._handlers.clear()
del logging._handlerList[:]
# Do formatters first - they don't refer to anything else
formatters = config.get('formatters', EMPTY_DICT)
for name in formatters:
try:
formatters[name] = self.configure_formatter(
formatters[name])
except StandardError, e:
raise ValueError('Unable to configure '
'formatter %r: %s' % (name, e))
# Next, do filters - they don't refer to anything else, either
filters = config.get('filters', EMPTY_DICT)
for name in filters:
try:
filters[name] = self.configure_filter(filters[name])
except StandardError, e:
raise ValueError('Unable to configure '
'filter %r: %s' % (name, e))
# Next, do handlers - they refer to formatters and filters
# As handlers can refer to other handlers, sort the keys
# to allow a deterministic order of configuration
handlers = config.get('handlers', EMPTY_DICT)
for name in sorted(handlers):
try:
handler = self.configure_handler(handlers[name])
handler.name = name
handlers[name] = handler
except StandardError, e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
# Next, do loggers - they refer to handlers and filters
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
root = logging.root
existing = root.manager.loggerDict.keys()
#The list needs to be sorted so that we can
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
existing.sort()
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
#now set up the new ones...
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
if name in existing:
i = existing.index(name)
prefixed = name + "."
pflen = len(prefixed)
num_existing = len(existing)
i = i + 1 # look at the entry after name
while (i < num_existing) and\
(existing[i][:pflen] == prefixed):
child_loggers.append(existing[i])
i = i + 1
existing.remove(name)
try:
self.configure_logger(name, loggers[name])
except StandardError, e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
#However, don't disable children of named loggers, as that's
#probably not what was intended by the user.
for log in existing:
logger = root.manager.loggerDict[log]
if log in child_loggers:
logger.level = logging.NOTSET
logger.handlers = []
logger.propagate = True
elif disable_existing:
logger.disabled = True
# And finally, do the root logger
root = config.get('root', None)
if root:
try:
self.configure_root(root)
except StandardError, e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
finally:
logging._releaseLock()
def configure_formatter(self, config):
"""Configure a formatter from a dictionary."""
if '()' in config:
factory = config['()'] # for use in exception handler
try:
result = self.configure_custom(config)
except TypeError, te:
if "'format'" not in str(te):
raise
#Name of parameter changed from fmt to format.
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
config['fmt'] = config.pop('format')
config['()'] = factory
result = self.configure_custom(config)
else:
fmt = config.get('format', None)
dfmt = config.get('datefmt', None)
result = logging.Formatter(fmt, dfmt)
return result
def configure_filter(self, config):
"""Configure a filter from a dictionary."""
if '()' in config:
result = self.configure_custom(config)
else:
name = config.get('name', '')
result = logging.Filter(name)
return result
def add_filters(self, filterer, filters):
"""Add filters to a filterer from a list of names."""
for f in filters:
try:
filterer.addFilter(self.config['filters'][f])
except StandardError, e:
raise ValueError('Unable to add filter %r: %s' % (f, e))
def configure_handler(self, config):
"""Configure a handler from a dictionary."""
formatter = config.pop('formatter', None)
if formatter:
try:
formatter = self.config['formatters'][formatter]
except StandardError, e:
raise ValueError('Unable to set formatter '
'%r: %s' % (formatter, e))
level = config.pop('level', None)
filters = config.pop('filters', None)
if '()' in config:
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
factory = c
else:
klass = self.resolve(config.pop('class'))
#Special case for handler which refers to another handler
if issubclass(klass, logging.handlers.MemoryHandler) and\
'target' in config:
try:
config['target'] = self.config['handlers'][config['target']]
except StandardError, e:
raise ValueError('Unable to set target handler '
'%r: %s' % (config['target'], e))
elif issubclass(klass, logging.handlers.SMTPHandler) and\
'mailhost' in config:
config['mailhost'] = self.as_tuple(config['mailhost'])
elif issubclass(klass, logging.handlers.SysLogHandler) and\
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
except TypeError, te:
if "'stream'" not in str(te):
raise
#The argument name changed from strm to stream
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
kwargs['strm'] = kwargs.pop('stream')
result = factory(**kwargs)
if formatter:
result.setFormatter(formatter)
if level is not None:
result.setLevel(_checkLevel(level))
if filters:
self.add_filters(result, filters)
return result
def add_handlers(self, logger, handlers):
"""Add handlers to a logger from a list of names."""
for h in handlers:
try:
logger.addHandler(self.config['handlers'][h])
except StandardError, e:
raise ValueError('Unable to add handler %r: %s' % (h, e))
def common_logger_config(self, logger, config, incremental=False):
"""
Perform configuration which is common to root and non-root loggers.
"""
level = config.get('level', None)
if level is not None:
logger.setLevel(_checkLevel(level))
if not incremental:
#Remove any existing handlers
for h in logger.handlers[:]:
logger.removeHandler(h)
handlers = config.get('handlers', None)
if handlers:
self.add_handlers(logger, handlers)
filters = config.get('filters', None)
if filters:
self.add_filters(logger, filters)
def configure_logger(self, name, config, incremental=False):
"""Configure a non-root logger from a dictionary."""
logger = logging.getLogger(name)
self.common_logger_config(logger, config, incremental)
propagate = config.get('propagate', None)
if propagate is not None:
logger.propagate = propagate
def configure_root(self, config, incremental=False):
"""Configure a root logger from a dictionary."""
root = logging.getLogger()
self.common_logger_config(root, config, incremental)
dictConfigClass = DictConfigurator
def dictConfig(config):
"""Configure logging using a dictionary."""
dictConfigClass(config).configure() | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/third_party/dictconfig.py | dictconfig.py |
import os
import sys
import time
import fnmatch
import tempfile
import tarfile
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
try:
import subprocess
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
except ImportError:
# will be used for python 2.3
def _python_cmd(*args):
args = (sys.executable,) + args
# quoting arguments if windows
if sys.platform == 'win32':
def quote(arg):
if ' ' in arg:
return '"%s"' % arg
return arg
args = [quote(arg) for arg in args]
return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
DEFAULT_VERSION = "0.6.13"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Distribute')
if not _python_cmd('setup.py', 'install'):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
finally:
os.chdir(old_wd)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Distribute egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15, no_fake=True):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
if not no_fake:
_fake_setuptools()
raise ImportError
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("distribute>="+version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of distribute (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U distribute'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
finally:
if not no_fake:
_create_fake_setuptools_pkg_info(to_dir)
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15):
"""Download distribute from a specified location and return its filename
`version` should be a valid distribute version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
tgz_name = "distribute-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
log.warn("Downloading %s", url)
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(saveto, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
return os.path.realpath(saveto)
def _no_sandbox(function):
def __no_sandbox(*args, **kw):
try:
from setuptools.sandbox import DirectorySandbox
if not hasattr(DirectorySandbox, '_old'):
def violation(*args):
pass
DirectorySandbox._old = DirectorySandbox._violation
DirectorySandbox._violation = violation
patched = True
else:
patched = False
except ImportError:
patched = False
try:
return function(*args, **kw)
finally:
if patched:
DirectorySandbox._violation = DirectorySandbox._old
del DirectorySandbox._old
return __no_sandbox
def _patch_file(path, content):
"""Will backup the file then patch it"""
existing_content = open(path).read()
if existing_content == content:
# already patched
log.warn('Already patched.')
return False
log.warn('Patching...')
_rename_path(path)
f = open(path, 'w')
try:
f.write(content)
finally:
f.close()
return True
_patch_file = _no_sandbox(_patch_file)
def _same_content(path, content):
return open(path).read() == content
def _rename_path(path):
new_name = path + '.OLD.%s' % time.time()
log.warn('Renaming %s into %s', path, new_name)
os.rename(path, new_name)
return new_name
def _remove_flat_installation(placeholder):
if not os.path.isdir(placeholder):
log.warn('Unkown installation at %s', placeholder)
return False
found = False
for file in os.listdir(placeholder):
if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
found = True
break
if not found:
log.warn('Could not locate setuptools*.egg-info')
return
log.warn('Removing elements out of the way...')
pkg_info = os.path.join(placeholder, file)
if os.path.isdir(pkg_info):
patched = _patch_egg_dir(pkg_info)
else:
patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
if not patched:
log.warn('%s already patched.', pkg_info)
return False
# now let's move the files out of the way
for element in ('setuptools', 'pkg_resources.py', 'site.py'):
element = os.path.join(placeholder, element)
if os.path.exists(element):
_rename_path(element)
else:
log.warn('Could not find the %s element of the '
'Setuptools distribution', element)
return True
_remove_flat_installation = _no_sandbox(_remove_flat_installation)
def _after_install(dist):
log.warn('After install bootstrap.')
placeholder = dist.get_command_obj('install').install_purelib
_create_fake_setuptools_pkg_info(placeholder)
def _create_fake_setuptools_pkg_info(placeholder):
if not placeholder or not os.path.exists(placeholder):
log.warn('Could not find the install location')
return
pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
setuptools_file = 'setuptools-%s-py%s.egg-info' % \
(SETUPTOOLS_FAKED_VERSION, pyver)
pkg_info = os.path.join(placeholder, setuptools_file)
if os.path.exists(pkg_info):
log.warn('%s already exists', pkg_info)
return
log.warn('Creating %s', pkg_info)
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
pth_file = os.path.join(placeholder, 'setuptools.pth')
log.warn('Creating %s', pth_file)
f = open(pth_file, 'w')
try:
f.write(os.path.join(os.curdir, setuptools_file))
finally:
f.close()
_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
def _patch_egg_dir(path):
# let's check if it's already patched
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
if os.path.exists(pkg_info):
if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
log.warn('%s already patched.', pkg_info)
return False
_rename_path(path)
os.mkdir(path)
os.mkdir(os.path.join(path, 'EGG-INFO'))
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
return True
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
def _before_install():
log.warn('Before install bootstrap.')
_fake_setuptools()
def _under_prefix(location):
if 'install' not in sys.argv:
return True
args = sys.argv[sys.argv.index('install')+1:]
for index, arg in enumerate(args):
for option in ('--root', '--prefix'):
if arg.startswith('%s=' % option):
top_dir = arg.split('root=')[-1]
return location.startswith(top_dir)
elif arg == option:
if len(args) > index:
top_dir = args[index+1]
return location.startswith(top_dir)
if arg == '--user' and USER_SITE is not None:
return location.startswith(USER_SITE)
return True
def _fake_setuptools():
log.warn('Scanning installed packages')
try:
import pkg_resources
except ImportError:
# we're cool
log.warn('Setuptools or Distribute does not seem to be installed.')
return
ws = pkg_resources.working_set
try:
setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
replacement=False))
except TypeError:
# old distribute API
setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
if setuptools_dist is None:
log.warn('No setuptools distribution found')
return
# detecting if it was already faked
setuptools_location = setuptools_dist.location
log.warn('Setuptools installation detected at %s', setuptools_location)
# if --root or --preix was provided, and if
# setuptools is not located in them, we don't patch it
if not _under_prefix(setuptools_location):
log.warn('Not patching, --root or --prefix is installing Distribute'
' in another location')
return
# let's see if its an egg
if not setuptools_location.endswith('.egg'):
log.warn('Non-egg installation')
res = _remove_flat_installation(setuptools_location)
if not res:
return
else:
log.warn('Egg installation')
pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
if (os.path.exists(pkg_info) and
_same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
log.warn('Already patched.')
return
log.warn('Patching...')
# let's create a fake egg replacing setuptools one
res = _patch_egg_dir(setuptools_location)
if not res:
return
log.warn('Patched done.')
_relaunch()
def _relaunch():
log.warn('Relaunching...')
# we have to relaunch the process
# pip marker to avoid a relaunch bug
if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
sys.argv[0] = 'setup.py'
args = [sys.executable] + sys.argv
sys.exit(subprocess.call(args))
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def main(argv, version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
tarball = download_setuptools()
_install(tarball)
if __name__ == '__main__':
main(sys.argv[1:]) | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/third_party/distribute_setup.py | distribute_setup.py |
import os
import threading
import multiprocessing
from multiprocessing import Pool, Manager
from agatsuma import Settings, log
from agatsuma.adaptations.abclasses import ABCStrictMetaVerbose, abstractmethod
from agatsuma.core import Core
from agatsuma.interfaces import IPoolEventSpell
from agatsuma.interfaces import AbstractCoreExtension
"""
Base core extension providing a pool of worker processes, able to
notify them about settings changes.
.. warning:: If you want to change settings from worker threads you should call :meth:`agatsuma.core.MPCore.start_settings_updater` right after core initialization.
``MPCore`` uses a timer for updating settings in the main process. This may not be
ideal if you are using Agatsuma with another library which provides periodic
callbacks. If so you should override the method :meth:`agatsuma.core.MPCore.start_settings_updater`
in a core subclass and avoid spawning the unwanted thread.
.. note:: The only way to shut down a multiprocessing application correctly from another application is to send the ``SIGTERM`` signal to the main process.
"""
class MultiprocessingCoreExtension(AbstractCoreExtension):
__metaclass__ = ABCStrictMetaVerbose
config_update_manager = None
shared_config_data = None
pids = None
def init(self, core, app_directories, appConfig, kwargs):
multiprocessing.get_logger() # this logger should be configured in config
manager = Manager()
MultiprocessingCoreExtension.config_update_manager = manager
MultiprocessingCoreExtension.shared_config_data = manager.dict()
MultiprocessingCoreExtension.pids = manager.list()
spell_directories = []
nsFragments = ('agatsuma', 'spells', 'supplemental', 'mp')
spell_directories.extend ([core.internal_spell_space(*nsFragments)
])
spell_directories.extend(kwargs.get('spell_directories', []))
kwargs['spell_directories'] = spell_directories
return (app_directories, appConfig, kwargs)
def additional_methods(self):
return [("start_settings_updater", self.start_settings_updater),
("remember_pid", self.remember_pid),
("write_pid", self.write_pid)
]
@staticmethod
def name():
return "multiprocessing"
def on_core_post_configure(self, core):
"""
spell_directories = []
nsFragments = ('agatsuma', 'framework', 'tornado', 'spells')
spell_directories.extend ([self.internal_spell_space(*nsFragments)
])
"""
MultiprocessingCoreExtension.removePidFile()
log.mpcore.info("Calling pre-pool-init routines...")
#self._pre_pool_init() # TODO: XXX:
poolEventSpells = core.spellbook.implementations_of(IPoolEventSpell)
for spell in poolEventSpells:
spell.pre_pool_init(core)
core.pool = None
workers = Settings.mpcore.workers
if workers >= 0:
log.mpcore.debug("Starting %d workers..." % workers)
core.pool = Pool(processes=workers,
initializer = _worker_initializer,
initargs = (Settings.mpcore.settings_update_timeout, ))
else:
log.mpcore.info("Pool initiation skipped due negative workers count")
log.mpcore.info("Calling post-pool-init routines...")
for spell in poolEventSpells:
spell.post_pool_init(core)
self.pool = core.pool
def on_core_stop(self, core):
if core.pool:
core.pool.close()
self.removePidFile()
################################################
# TODO: XXX: one method enough
@staticmethod
def remember_pid(pid):
assert type(pid) is int
MultiprocessingCoreExtension.pids.append(pid)
@staticmethod
def write_pid(pid):
log.mpcore.debug("Writing PID %d" % pid)
mode = "a+"
pidfile = Settings.mpcore.pidfile
if not os.path.exists(pidfile):
mode = "w+"
f = open(pidfile, mode)
f.write("%d\n" % pid)
f.close()
@staticmethod
def removePidFile():
log.mpcore.debug("Removing pidfile...")
pidfile = Settings.mpcore.pidfile
if os.path.exists(pidfile):
os.remove(pidfile)
def start_settings_updater(self):
""" Initiates periodic checking for config updates. May be overriden in
subclasses """
if self.pool:
self._start_settings_updater()
else:
log.mpcore.info("Settings updater is not started in main thread due empty process' pool")
@abstractmethod
def _start_settings_updater(self):
pass
@staticmethod
def _update_settings():
# Settings in current thread are in old state
# If we detect, that shared object has updated config we replace it
process = multiprocessing.current_process()
thread = threading.currentThread()
log.mpcore.info("Checking for config updates in process '%s' with PID %s using thread '%s'..."
% (str(process.name), process.pid, thread.getName()))
prevUpdate = Settings.configData['update']
lastUpdate = MultiprocessingCoreExtension.shared_config_data['update']
if (prevUpdate < lastUpdate):
process = multiprocessing.current_process()
thread = threading.currentThread()
log.mpcore.info("Process '%s' with PID %s received new config, updating using thread '%s'..."
% (str(process.name), process.pid, thread.getName()))
#Core.settings.parse_settings(Core.shared_config_data['data'], Settings.descriptors)
Settings.set_config_data(MultiprocessingCoreExtension.shared_config_data['data'], update_shared = False)
def _worker_initializer(timeout):
process = multiprocessing.current_process()
MultiprocessingCoreExtension.remember_pid(process.pid)
MultiprocessingCoreExtension.write_pid(process.pid)
log.mpcore.debug("Initializing worker process '%s' with PID %d. Starting config update checker with %ds timeout" % (str(process.name), process.pid, timeout))
MPStandaloneExtension._update_settings_by_timer(timeout)
class MPStandaloneExtension(MultiprocessingCoreExtension):
def _start_settings_updater(self):
MPStandaloneExtension._update_settings_by_timer(Settings.mpcore.settings_update_timeout)
@staticmethod
def _update_settings_by_timer(timeout):
if Core.instance.shutdown:
return
threading.Timer(timeout, MPStandaloneExtension._update_settings_by_timer, (timeout, )).start()
        MultiprocessingCoreExtension._update_settings()
# ==== end of agatsuma/core/mp_core.py (Agatsuma) ====
import os
import re
import signal
from agatsuma import Enumerator
from agatsuma import log
from agatsuma import Settings, LightweightSettings
from agatsuma import Spellbook
major_version = 0
minor_version = 2
try:
from agatsuma.version import commits_count, branch_id, commit_id
except Exception, e:
print "Cannot obtain version information: %s" % str(e)
commits_count = 0
branch_id = "branch"
commit_id = "commit"
def up(p):
return os.path.split(p)[0]
class Core(object):
"""Base core which provides basic services, such as settings
and also able to enumerate spells.
:param app_directories: list of paths to directories containing application spells.
.. note:: All the paths in ``app_directories`` list must define importable namespaces. So if we replace all '/' with '.' in such path we should get importable namespace
.. note:: app_directories also may contain tuples with two values (``dir``, ``ns``) where ``ns`` is namespace corresponding to directory ``dir`` but it's not recommended to use this feature.
:param appConfig: path to JSON file with application settings
The following kwargs parameters are supported:
#. `app_name` : Application name
#. `application_spells` : names of namespaces to search spells inside
#. `spell_directories` : additional (to `app_directory`) directories to search spells inside
.. attribute:: instance
The core instance. Only one core may be instantiated during application
lifetime.
.. attribute:: version_string
Full Agatsuma version including commit identifier and branch.
May be extracted from GIT repository with `getversion` script.
.. attribute:: internal_state
Dict. For now contains only the key ``mode`` with value ``setup`` when core
was started from setup.py and ``normal`` otherwise.
.. attribute:: agatsuma_base_dir
Path to directory which contains Agatsuma. This directory makes Agatsuma's
namespaces available when added into ``PYTHONPATH``.
"""
instance = None
version_string = "%d.%d.%d.%s.%s" % (major_version, minor_version, commits_count, branch_id, commit_id)
internal_state = {"mode":"normal"}
agatsuma_base_dir = up(up(os.path.realpath(os.path.dirname(__file__))))
@staticmethod
def internal_spell_space(*fragments):
basePath = os.path.join(Core.agatsuma_base_dir, *fragments)
baseNS = '.'.join(fragments)
return (basePath, baseNS)
def __init__(self, app_directories, appConfig, **kwargs):
assert Core.instance is None
Core.instance = self
self.app_name = kwargs.get("app_name", None)
self.application_spells = kwargs.get("application_spells", [])
self.spell_directories = kwargs.get("spell_directories", [])
self.app_mode = kwargs.get("app_mode", "normal")
Core.internal_state["mode"] = self.app_mode
LightweightSettings.load_config(appConfig)
log.new_logger("core")
log.new_logger("storage")
log.core.info("Initializing Agatsuma")
log.core.info("Version: %s" % self.version_string)
log.core.info("Agatsuma's base directory: %s" % self.agatsuma_base_dir)
self.shutdown = False
self.extensions = []
coreExtensions = kwargs.get("core_extensions", [])
for extensionClass in coreExtensions:
log.core.info("Instantiating core extension '%s'..." % extensionClass.name())
extension = extensionClass()
(app_directories, appConfig, kwargs) = extension.init(self, app_directories, appConfig, kwargs)
methods = extension.additional_methods()
for method_name, method in methods:
setattr(self, method_name, method)
log.core.debug("Extension method '%s' added to core's interface" % method_name)
self.extensions.append(extension)
#self.spells = []
#self.spellbook = {}
self.spellbook = Spellbook()
self.registered_settings = {}
self.entry_points = {}
#self.global_filters_list = [] #TODO: templating and this
forbidden_spells = kwargs.get("forbidden_spells", [])
enumerator = Enumerator(self, app_directories, forbidden_spells)
self.spell_directories.append(self.internal_spell_space('agatsuma', 'spells', 'common'))
enumerator.enumerate_spells(self.application_spells, self.spell_directories)
if appConfig:
if self.app_mode == 'normal':
from agatsuma.interfaces.abstract_spell import AbstractSpell
log.core.info("Initializing spells...")
allTheSpells = self.spellbook.implementations_of(AbstractSpell)
for spell in allTheSpells:
spell.pre_configure(self)
# Replace early config with full-featured one
Settings.load_config(appConfig, self.registered_settings)
#self.logger.update_levels()
log.core.info("Calling post-configure routines...")
for spell in allTheSpells:
spell.post_configure(self)
log.core.info("Spells initialization completed")
self._post_configure()
enumerator.eagerUnload()
else:
log.core.critical("Config path is None")
log.core.info("Initialization completed")
signal.signal(signal.SIGTERM, self._signal_handler)
def _stop(self):
"""
Empty virtual function intended to be overriden in subclasses.
Runs before core shutdown.
"""
for extension in self.extensions:
extension.on_core_stop(self)
def _post_configure(self):
for extension in self.extensions:
extension.on_core_post_configure(self)
def _signal_handler(self, signum, frame):
log.core.debug("Received signal %d" % signum)
self.stop()
def stop(self):
"""
This method is intended to stop core. Subclasses may override method
:meth:`agatsuma.core.Core._stop` to perform some cleanup actions here.
"""
log.core.info("Stopping Agatsuma...")
self.shutdown = True
self._stop()
def register_option(self, settingName, settingType, settingComment):
""" This function must be called from
:meth:`agatsuma.interfaces.AbstractSpell.pre_configure`
**TODO**
        :param settingName: String consisting of a *group name* and an *option name* separated with a dot (``group.option`` for example). The option will be treated as read-only if the string begins with an exclamation mark.
        :param settingType: type for the option value. All types compatible with JSON are allowed.
:param settingComment: string with human-readable description for option
See also **TODO**
"""
if not getattr(self, "settingRe", None):
self.settingRe = re.compile(r"^(!{0,1})((\w+)\.{0,1}(\w+))$")
match = self.settingRe.match(settingName)
if match:
settingDescr = (match.group(3),
match.group(4),
bool(match.group(1)),
settingType,
settingComment,
)
fqn = match.group(2)
if fqn in self.registered_settings:
raise Exception("Setting is already registered: '%s' (%s)" % (fqn, settingComment))
self.registered_settings[fqn] = settingDescr
else:
raise Exception("Bad setting name: '%s' (%s)" % (settingName, settingComment))
def register_entry_point(self, entry_pointId, epFn):
""" This method is intended to register *entry points*.
        An entry point is an arbitrary function which receives an
        arbitrary argument list. An entry point may be called via
        :meth:`agatsuma.core.Core.run_entry_point`. Core and services are fully initialized when
        an entry point becomes available, so it may be used to perform
        tasks that require a fully initialized environment, such
        as database cleanup or something else.
"""
if not entry_pointId in self.entry_points:
self.entry_points[entry_pointId] = epFn
else:
raise Exception("Entry point with name '%s' is already registered" % entry_pointId)
def run_entry_point(self, name, *args, **kwargs):
""" This method runs registered entry point with given `name`
with arguments `*args` and `**kwargs`.
You should manually call this method from your application code when
you need to run entry point.
Basic Agatsuma's services provides several usable
:ref:`entry points<std-entry-points>`.
"""
        self.entry_points[name](*args, **kwargs)
# ==== end of agatsuma/core/base_core.py (Agatsuma) ====
class DictAccessProxy(object):
def __init__(self, source_dict):
object.__setattr__(self, '_DictAccessProxy__dict', source_dict)
def __getattr__(self, name):
stored_dict = object.__getattribute__(self, '_DictAccessProxy__dict')
if name in stored_dict:
return stored_dict[name]
else:
return object.__getattribute__(self, name)
def __repr__(self):
return str("<Dict proxy: %s>" % self.__dict)
class SettingsGroupProxy(DictAccessProxy):
def __init__(self, source_dict, group_name, readonly_list, types, comments, update_callback):
DictAccessProxy.__init__(self, source_dict)
self.__readonly_list = readonly_list
        self.__group_name = group_name
self.__types = types
self.__comments = comments
self.__update_callback = update_callback
def __setattr__(self, name, value):
sdict = object.__getattribute__(self, '_DictAccessProxy__dict')
if name in sdict:
if name in self.__readonly_list:
raise Exception("Option '%s.%s' is read-only" % (self.__group_name, name))
elif name in self.__types and type(value) != self.__types[name]:
raise Exception("Option '%s.%s' must have type %s, but %s tried to assign" %
(self.__group_name,
name,
self.__types[name],
type(value),
)
)
else:
sdict[name] = value # dict is reference to original dict
self.__update_callback()
else:
object.__setattr__(self, name, value)
def to_dict(self):
ret = {}
sdict = object.__getattribute__(self, '_DictAccessProxy__dict')
ret.update(sdict)
return ret
def __repr__(self):
return str("<Settings group '%s': %s>" % (self.__group_name, self.__dict)) | Agatsuma | /Agatsuma-0.2.176.default.3499b00918ca.tip.tar.gz/Agatsuma-0.2.176.default.3499b00918ca.tip/agatsuma/infrastructure/access_proxy.py | access_proxy.py |
import datetime
import multiprocessing
import threading
#from agatsuma import log, Spell, Implementations
from agatsuma.interfaces import AbstractSpell
from settings_meta import SettingsMeta
from loggers import log
def fix_string_type(obj, expected_type):
str_types = [unicode, str]
if type(obj) in str_types and expected_type in str_types:
return expected_type(obj)
else:
return obj
class Settings(object):
__metaclass__ = SettingsMeta
settings = {}
@staticmethod
def load_cfg_data(config_data, descriptors):
Settings.process_settings(config_data, descriptors)
@staticmethod
def process_settings(settings, descriptors):
settings = Settings.provider.load(settings)
problems = []
newsettings = {}
rosettings = {}
types = {}
comments = {}
actual = 0
rocount = 0
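        # Each descriptor is the tuple built by Core.register_option:
        # (group name, option name, read-only flag, expected type, comment).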
for group, name, ro, stype, comment in descriptors.values():
if not group in settings:
problems.append("Group '%s' (%s) not found in settings" %
(group, comment))
continue
groupDict = settings[group]
if not name in groupDict:
problems.append("Setting '%s' (%s) not found in group '%s'" %
(name, comment, group))
continue
value = fix_string_type(groupDict[name], stype)
rstype = type(value)
fullname = '%s.%s' % (group, name)
if rstype != stype:
problems.append("Setting '%s' (%s) has incorrect type '%s' instead of '%s'" %
(fullname, comment, str(rstype), str(stype)))
continue
if not group in newsettings:
newsettings[group] = {}
types[group] = {}
comments[group] = {}
rosettings[group] = []
newsettings[group][name] = value
types[group][name] = stype
comments[group][name] = comment
if ro:
rosettings[group].append(name)
rocount += 1
actual += 1
if problems:
log.settings.error('\n'.join(problems))
raise Exception("Can't load settings")
# TODO: XXX: actual == total?
log.settings.info('%d settings found in config, %d are actual (%d read-only)' % (len(descriptors), actual, rocount))
Settings.readonly_settings = rosettings
Settings.types = types
Settings.comments = comments
Settings.descriptors = descriptors
Settings.set_config_data(newsettings)
@staticmethod
def set_config_data(settings, **kwargs):
from agatsuma.core import Core
process = multiprocessing.current_process()
thread = threading.currentThread()
log.settings.info("Installing new config data in process '%s' with PID %d using thread '%s'" %
(str(process.name), process.pid, thread.getName()))
timestamp = datetime.datetime.now()
try:
Settings.config_lock.acquire()
Settings.settings.update(settings)
finally:
Settings.config_lock.release()
if Settings.core.debug > 0:
log.settings.debug("Updated config: %s" % str(Settings.settings))
Settings.configData = {"data": settings,
"update" : timestamp,
}
spells = Core.instance.spellbook.implementations_of(AbstractSpell)
for spell in spells:
spell.post_config_update(**kwargs)
@staticmethod
def update_callback():
        Settings.set_config_data(Settings.settings)
# ==== end of agatsuma/infrastructure/settings.py (Agatsuma) ====
import os
from agatsuma.core import Core
if Core.internal_state.get("mode", None) == "normal":
from pylons.configuration import PylonsConfig
from pylons.error import handle_mako_error
from pylons.middleware import ErrorHandler, StatusCodeRedirect
from pylons.wsgiapp import PylonsApp
from paste.cascade import Cascade
from paste.registry import RegistryManager
from paste.urlparser import StaticURLParser
from paste.deploy.converters import asbool
from beaker.middleware import SessionMiddleware
from routes.middleware import RoutesMiddleware
from routes import Mapper
from mako.lookup import TemplateLookup
from agatsuma import Implementations, log
from agatsuma.web.pylons.interfaces import IMiddlewareSpell, IHandlingSpell
class PylonsAdaptor(object):
"""
"""
def __init__(self, **kwargs):
"""
"""
if Core.internal_state.get("mode", None) == "normal":
log.pcore.debug("Initializing Pylons...")
pylonsRoot = kwargs['pylons_root']
global_conf = kwargs['global_conf']
app_conf = kwargs['app_conf']
#app_name = kwargs['app_name']
helpers = kwargs['helpers']
GlobalsClass = kwargs['globals_class']
config = self._load_environment(pylonsRoot,
global_conf, app_conf,
GlobalsClass, helpers)
full_stack = kwargs['full_stack']
static_files = kwargs['static_files']
self.app = self._make_app(config, full_stack, static_files)
else:
log.core.warning("Setup mode. Pylons initialization skipped")
self.app = None
def _make_app(self, config, full_stack=True, static_files=True):
# The Pylons WSGI app
log.pcore.debug("Initializing middleware...")
app = PylonsApp(config=config)
# Routing/Session Middleware
app = RoutesMiddleware(app, config['routes.map'], singleton=False)
app = SessionMiddleware(app, config)
# CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
spells = Implementations(IMiddlewareSpell)
for spell in spells:
app = spell.add_middleware(app)
if asbool(full_stack):
# Handle Python exceptions
global_conf = config # I think that it's correct, config is slightly modified global_conf
app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
# Display error documents for 401, 403, 404 status codes (and
# 500 when debug is disabled)
if asbool(config['debug']):
app = StatusCodeRedirect(app)
else:
app = StatusCodeRedirect(app, [400, 401, 403, 404, 500])
# Establish the Registry for this application
app = RegistryManager(app)
if asbool(static_files):
# Serve static files
static_app = StaticURLParser(config['pylons.paths']['static_files'])
app = Cascade([static_app, app])
app.config = config
return app
def _load_environment(self, pylonsRoot, global_conf, app_conf, GlobalsClass, helpers):
log.pcore.debug("Loading environment...")
"""Configure the Pylons environment via the ``pylons.config``
object
"""
log.pcore.debug("global_conf for Pylons: %s" % str(global_conf))
log.pcore.debug("app_conf for Pylons: %s" % str(app_conf))
config = PylonsConfig()
# Pylons paths
root = os.path.abspath(pylonsRoot) #os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
paths = dict(root=root,
controllers=os.path.join(root, 'controllers'),
static_files=os.path.join(root, 'public'),
templates=[os.path.join(root, 'templates')])
# Initialize config with the basic options
config.init_app(global_conf,
app_conf,
package=pylonsRoot,
paths=paths)
config['routes.map'] = self._makeMap(config)
config['pylons.app_globals'] = GlobalsClass(config)
config['pylons.h'] = helpers
# Setup cache object as early as possible
import pylons
pylons.cache._push_object(config['pylons.app_globals'].cache)
# Create the Mako TemplateLookup, with the default auto-escaping
config['pylons.app_globals'].mako_lookup = TemplateLookup(
directories=paths['templates'],
error_handler=handle_mako_error,
module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
input_encoding='utf-8',
default_filters=['escape'],
imports=['from webhelpers.html import escape'])
# CONFIGURATION OPTIONS HERE (note: all config options will override
# any Pylons config options)
# TODO: call spells ???
return config
def _makeMap(self, config):
log.pcore.debug("Setting up routes...")
"""Create, configure and return the routes Mapper"""
map = Mapper(directory=config['pylons.paths']['controllers'],
always_scan=config['debug'])
map.minimization = False
map.explicit = False
# The ErrorController route (handles 404/500 error pages); it should
# likely stay at the top, ensuring it can always be resolved
# TODO: ??? Is it really required ???
map.connect('/error/{action}', controller='error')
map.connect('/error/{action}/{id}', controller='error')
# CUSTOM ROUTES HERE
        spells = Implementations(IHandlingSpell)
        log.pcore.debug("Handling spells found: %s" % str(spells))
for spell in spells:
spell.init_routes(map)
for spell in spells:
spell.post_init_routes(map)
        return map
# ==== end of agatsuma/web/pylons/pylons_adaptor.py (Agatsuma) ====
import datetime
import os
from agatsuma import log
from agatsuma import Settings
from agatsuma.web.tornado.interfaces import AbstractSession
class BaseSessionManager(object):
def __init__(self):
pass
    def _generate_session_id(self):
return os.urandom(32).encode('hex')
def _session_doomsday(self, moment):
return moment + datetime.timedelta(seconds=Settings.sessions.expiration_interval)
def new(self, ip, user_agent):
newId = self._generate_session_id()
sess = AbstractSession(newId, {})
sess.fill(ip, user_agent)
return sess
def load(self, sessionId):
sessData = self.load_data(sessionId)
log.sessions.debug("Loaded session %s with data %s loaded" % (sessionId, str(sessData)))
if sessData:
if datetime.datetime.now() >= self._session_doomsday(sessData["timestamp"]):
log.sessions.debug("Session %s expired and destroyed" % sessionId)
self.destroy_data(sessionId)
return None
sess = AbstractSession(sessionId, sessData)
sess.saved = True
return sess
def save(self, session):
session["timestamp"] = datetime.datetime.now()
self.save_data(session.id, session.data)
if session.handler and not session.cookieSent:
log.sessions.debug("Session %s with data %s saved and cookie set" % (session.id, str(session.data)))
session.handler.set_secure_cookie(u"AgatsumaSessId", session.id)
session.cookieSent = True
else:
log.sessions.debug("Session %s with data %s saved but cookie not set" % (session.id, str(session.data)))
session.saved = True
def delete(self, session):
self.destroy_data(session.id)
if session.handler:
session.handler.clear_cookie("AgatsumaSessId")
else:
log.sessions.warning("Session %s with data %s destroyed but cookie not cleared: no handler" % (session.id, str(session.data)))
session.saved = False
def cleanup(self):
"""Deletes sessions with timestamps in the past form storage."""
pass
def destroy_data(self, sessionId):
""" destroys session in storage """
pass
def load_data(self, sessionId):
""" returns session data if exists, otherwise returns None """
        pass
# ==== end of agatsuma/web/tornado/base_session_manager.py (Agatsuma) ====
import Queue
import multiprocessing
from multiprocessing import Queue as MPQueue
from weakref import WeakValueDictionary
from agatsuma.core import Core
from agatsuma.core import MultiprocessingCoreExtension
if Core.internal_state.get("mode", None) == "normal":
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.wsgi
import tornado
TornadoAppClass = tornado.web.Application
TornadoWSGIClass = tornado.wsgi.WSGIContainer
TornadoVersion = tornado.version
else:
TornadoAppClass = object
TornadoWSGIClass = object
TornadoVersion = None
from agatsuma import Settings
from agatsuma.adaptations.abclasses import ABCStrictMetaVerbose, abstractmethod
from agatsuma import log
from agatsuma.adaptations.mp import MPStreamHandler
class TornadoMPExtension(MultiprocessingCoreExtension):
@staticmethod
def name():
return "tornadomp"
def _start_settings_updater(self):
configChecker = tornado.ioloop.PeriodicCallback(MultiprocessingCoreExtension._update_settings,
1000 * Settings.mpcore.settings_update_timeout,
io_loop=Core.instance.ioloop)
configChecker.start()
supported_tornado_version="0.2"
class TornadoCore(Core):
__metaclass__ = ABCStrictMetaVerbose
mqueue = None
def __init__(self, app_directory, appConfig, **kwargs):
spell_directories = []
nsFragments = ('agatsuma', 'web', 'tornado', 'spells', 'common')
spell_directories.extend ([self.internal_spell_space(*nsFragments)
])
spell_directories.extend(kwargs.get('spell_directories', []))
kwargs['spell_directories'] = spell_directories
extensions = kwargs.get('core_extensions', [])
extensions.append(TornadoMPExtension)
kwargs['core_extensions'] = extensions
Core.__init__(self, app_directory, appConfig, **kwargs)
if TornadoVersion and supported_tornado_version < TornadoVersion:
log.tcore.info("Current Tornado version: %s" %(TornadoVersion, ))
log.tcore.warning("Current Tornado version is not supported: %s>%s" % (TornadoVersion, supported_tornado_version))
def _stop(self):
#self.HTTPServer.stop()
self.ioloop.stop()
Core._stop(self)
def start(self):
self.ioloop = tornado.ioloop.IOLoop.instance()
#self.__updateLogger()
pumpTimeout = Settings.tornado.logger_pump_timeout
handler_instance = MPStreamHandler()
self.logPump = tornado.ioloop.PeriodicCallback(handler_instance.process_log_queue,
pumpTimeout,
io_loop=self.ioloop)
handler_instance.enable_queue()
self.logPump.start()
port = Settings.tornado.port
#self.logger.setMPHandler(self.ioloop)
self.HTTPServer = tornado.httpserver.HTTPServer(self,
xheaders=Settings.tornado.xheaders,
# For future Tornado versions
#ssl_options=Settings.tornado.ssl_parameters
)
"""
# Preforking is only available in Tornado GIT
if Settings.core.forks > 0:
self.HTTPServer.bind(port)
self.HTTPServer.start()
else:
"""
self.HTTPServer.listen(port)
pid = multiprocessing.current_process().pid
self.remember_pid(pid)
self.write_pid(pid)
log.tcore.debug("Main process' PID: %d" % pid)
self.start_settings_updater()
self._before_ioloop_start()
log.tcore.info("=" * 60)
log.tcore.info("Starting %s/Agatsuma in server mode on port %d..." % (self.app_name, port))
log.tcore.info("=" * 60)
self.ioloop.start()
@abstractmethod
def _before_ioloop_start(self):
pass
class TornadoStandaloneCore(TornadoCore, TornadoAppClass):
"""Implements standalone Tornado server, useful to develop
lightweight asynchronous web applications
"""
def __init__(self, app_directory, appConfig, **kwargs):
"""
"""
spell_directories = []
nsFragments = ('agatsuma', 'web', 'tornado', 'spells', 'standalone')
spell_directories.extend ([self.internal_spell_space(*nsFragments)
])
spell_directories.extend(kwargs.get('spell_directories', []))
kwargs['spell_directories'] = spell_directories
self.URIMap = []
TornadoCore.__init__(self, app_directory, appConfig, **kwargs)
self.mpHandlerInstances = WeakValueDictionary()
tornadoSettings = {'debug': Settings.core.debug, # autoreload
'cookie_secret' : str(Settings.tornado.cookie_secret),
}
tornadoSettings.update(Settings.tornado.app_parameters)
assert len(self.URIMap) > 0
tornado.web.Application.__init__(self, self.URIMap, **tornadoSettings)
def _before_ioloop_start(self):
if self.messagePumpNeeded and self.pool:
TornadoCore.mqueue = MPQueue()
pumpTimeout = Settings.tornado.message_pump_timeout
mpump = tornado.ioloop.PeriodicCallback(self._messagePump,
pumpTimeout,
io_loop=self.ioloop)
log.tcore.debug("Starting message pump...")
mpump.start()
else:
log.tcore.debug("Message pump initiation skipped, it isn't required for any spell")
def _messagePump(self):
"""Extracts messages from message queue if any and pass them to
appropriate controller
"""
while not self.mqueue.empty():
try:
message = self.mqueue.get_nowait()
if Settings.tornado.messagepump_debug > 0:
log.tcore.debug("message: '%s'" % str(message))
if message and type(message) is tuple:
handlerId = message[0]
if handlerId in self.mpHandlerInstances:
self.mpHandlerInstances[handlerId].process_message(message)
else:
log.tcore.warning("unknown message recepient: '%s'" % str(message))
else:
log.tcore.warning("bad message: '%s'" % str(message))
except Queue.Empty:
log.tcore.warning("message: raised Queue.Empty")
if self.waitingCallbacks:
try:
for callback in self.waitingCallbacks:
callback()
finally:
self.waitingCallbacks = []
def handlerInitiated(self, handler):
# references are weak, so handler will be correctly destroyed and removed from dict automatically
self.mpHandlerInstances[id(handler)] = handler
class TornadoWSGICore(TornadoCore, TornadoWSGIClass):
"""Implements Tornado WSGI server, useful to run usual WSGI
applications on top of Tornado.
"""
def __init__(self, app_directory, appConfig, **kwargs):
"""
"""
TornadoCore.__init__(self, app_directory, appConfig, **kwargs)
def set_wsgi(self, wsgiapp):
tornado.wsgi.WSGIContainer.__init__(self, wsgiapp)
def _before_ioloop_start(self):
        pass
# ==== end of agatsuma/web/tornado/tornado_core.py (Agatsuma) ====
from multiprocessing import Queue as MPQueue
from agatsuma import log
from agatsuma.interfaces import AbstractSpell, IInternalSpell, ISetupSpell
from agatsuma.interfaces import IPoolEventSpell
from agatsuma.web.tornado.interfaces import IHandlingSpell
from agatsuma.web.tornado import Url
from agatsuma.commons.types import Atom
class TornadoSpell(AbstractSpell, IInternalSpell, ISetupSpell, IPoolEventSpell):
def __init__(self):
config = {'info' : 'Agatsuma Tornado Spell',
'deps' : (),
'eager_unload' : True,
}
AbstractSpell.__init__(self, Atom.agatsuma_tornado_standalone, config)
def pre_configure(self, core):
core.register_option("!tornado.cookie_secret", unicode, "cookie secret")
core.register_option("!tornado.message_pump_timeout", int, "Message pushing interval (msec)")
core.register_option("!tornado.app_parameters", dict, "Kwarg parameters for tornado application")
def __process_url(self, core, url):
if type(url) is tuple:
return url
if type(url) is Url:
core.URITemplates[url.name] = url.template
return (url.regex, url.handler)
raise Exception("Incorrect URL data^ %s" % str(url))
def post_configure(self, core):
log.tcore.info("Initializing URI map..")
spells = core.spellbook.implementations_of(IHandlingSpell)
if spells:
urimap = []
for spell in spells:
spell.init_routes(urimap)
for spell in spells:
spell.post_init_routes(urimap)
core.URIMap = []
core.URITemplates = {}
for url in urimap:
core.URIMap.append(self.__process_url(core, url))
log.tcore.info("URI map initialized")
#log.tcore.debug("URI map:\n%s" % '\n'.join(map(lambda x: str(x), self.core.URIMap)))
log.tcore.debug("URI map:")
for p in core.URIMap:
log.tcore.debug("* %s" % str(p))
else:
raise Exception("Handling spells not found!")
def pre_pool_init(self, core):
# Check if message pump is required for some of controllers
core.messagePumpNeeded = False
from agatsuma.web.tornado import MsgPumpHandler
for uri, handler in core.URIMap:
if issubclass(handler, MsgPumpHandler):
core.messagePumpNeeded = True
core.waitingCallbacks = []
break
if core.messagePumpNeeded:
core.mqueue = MPQueue()
def requirements(self):
return {"tornado" : ["tornado>=0.2"],
                }
# ==== end of agatsuma/web/tornado/spells/standalone/tornado_standalone.py (Agatsuma) ====
import datetime
import re
from agatsuma import SpellByStr
from agatsuma import Settings
from agatsuma import log
from agatsuma.interfaces import AbstractSpell, IInternalSpell
from agatsuma.web.tornado.interfaces import IRequestSpell, ISessionHandler
from agatsuma.commons.types import Atom
class SessionSpell(AbstractSpell, IInternalSpell, IRequestSpell):
def __init__(self):
config = {'info' : 'Agatsuma Session Spell',
'deps' : (Atom.agatsuma_tornado, ),
'requires' : (Atom.session_backend, ),
}
AbstractSpell.__init__(self, Atom.agatsuma_session, config)
def pre_configure(self, core):
log.new_logger("sessions")
core.register_option("!sessions.storage_uris", list, "Storage URIs")
core.register_option("!sessions.expiration_interval", int, "Default session length in seconds")
def post_configure(self, core):
log.sessions.info("Initializing Session Storage..")
rex = re.compile(r"^(\w+)\+(.*)$")
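        # Each storage URI is expected in the form '<backend>+<backend-specific-uri>';
        # the part before '+' selects the 'tornado_session_backend_<name>' spell.
        # For example (assuming the memcached backend spell shipped with Agatsuma),
        # something like 'memcached+memprefix://myprefix' would be parsed here.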
self.sessmans = []
for uri in Settings.sessions.storage_uris:
match = rex.match(uri)
if match:
managerId = match.group(1)
uri = match.group(2)
spellName = "tornado_session_backend_%s" % managerId
spell = SpellByStr(spellName)
if spell:
self.sessmans.append(spell.instantiate_backend(uri))
else:
raise Exception("Session backend improperly configured, spell '%s' not found" % spellName)
else:
raise Exception("Incorrect session storage URI")
def save_session(self, session):
for sessman in self.sessmans:
sessman.save(session)
def delete_session(self, session):
for sessman in self.sessmans:
sessman.delete(session)
def before_request_callback(self, handler):
if isinstance(handler, ISessionHandler):
cookie = handler.get_secure_cookie("AgatsumaSessId")
log.sessions.debug("Loading session for %s" % cookie)
session = None
if cookie:
for sessman in self.sessmans:
session = sessman.load(cookie)
if session:
session.handler = handler
# Update timestamp if left time < than elapsed time
timestamp = session["timestamp"]
now = datetime.datetime.now()
elapsed = now - timestamp
left = (sessman._session_doomsday(timestamp)- now)
if elapsed >= left:
log.sessions.debug("Updating timestamp for session %s (E: %s, L: %s)" %
(cookie, str(elapsed), str(left)))
self.save_session(session)
break
if not session:
session = self.sessmans[0].new(handler.request.remote_ip,
handler.request.headers["User-Agent"])
session.handler = handler
self.save_session(session)
handler.session = session
            session.sessSpell = self
# ==== end of agatsuma/web/tornado/spells/standalone/tornado_session.py (Agatsuma) ====
import re
import time
import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
from agatsuma import log
#from agatsuma import Settings
#from agatsuma import Core
from agatsuma import Spell
from agatsuma.interfaces import AbstractSpell, IInternalSpell
from agatsuma.commons.types import Atom
from agatsuma.web.tornado.interfaces import ISessionBackendSpell
from agatsuma.web.tornado import BaseSessionManager
class MemcachedSessionManager(BaseSessionManager):
def __init__(self, uri):
BaseSessionManager.__init__(self)
self.uri = uri
self.init_connection()
def init_connection(self):
log.sessions.info("Initializing Memcached session backend "\
"using URI '%s'" % self.uri)
self.keyprefix = self._parse_memcached_prefix_uri(self.uri)
memcachedSpell = Spell(Atom.agatsuma_memcached)
self.pool = memcachedSpell.get_connection_pool()
@property
def connection(self):
with self.pool.reserve() as mc:
return mc
def _getPrefixedKey(self, sessionId):
if self.keyprefix:
return str("%s_%s" % (self.keyprefix, sessionId))
return sessionId
@staticmethod
def _parse_memcached_prefix_uri(details):
# memprefix://prefixname
match = re.match('^memprefix://(\w+)$', details)
return match.group(1) if match else ''
def cleanup(self):
"""With Memcached as session storage, this function does
not make sense as all keys are saved with expiry time
        exactly the same as the session's. Hence Memcached takes
care of cleaning out the garbage."""
pass
def destroy_data(self, sessionId):
if not self.connection.remove(self._getPrefixedKey(sessionId)):
log.sessions.info("Deleting seesion %s failed. It was probably "\
"not set or expired" % sessionId)
def load_data(self, sessionId):
data = self.connection.get(self._getPrefixedKey(sessionId))
if data:
return pickle.loads(data)
def save_data(self, sessionId, data):
expTime = int(time.mktime(
self._session_doomsday(datetime.datetime.now()).timetuple()))
if not self.connection.set(self._getPrefixedKey(sessionId),
pickle.dumps(data), time=expTime):
log.sessions.critical("Saving %s session failed" % sessionId)
class MemcachedSessionSpell(AbstractSpell, IInternalSpell, ISessionBackendSpell):
def __init__(self):
config = {'info' : 'Memcached session storage',
'deps' : (Atom.agatsuma_memcached, ),
'provides' : (Atom.session_backend, )
}
AbstractSpell.__init__(self, Atom.tornado_session_backend_memcached,
config)
def instantiate_backend(self, uri):
self.managerInstance = MemcachedSessionManager(uri)
        return self.managerInstance
# ==== end of agatsuma/web/tornado/spells/standalone/session_backends/memcached_backend.py (Agatsuma) ====
import re
import time
import datetime
import pymongo
from agatsuma import log, Spell
from agatsuma.interfaces import AbstractSpell, IInternalSpell
from agatsuma.commons.types import Atom
from agatsuma.web.tornado.interfaces import ISessionBackendSpell
from agatsuma.web.tornado import BaseSessionManager
"""
Used code from
http://github.com/milancermak/tornado/blob/master/tornado/session.py
"""
"""
class AutoReconnect(object):
functions = {}
def __init__(self, method):
self.method = method
def __call__(self, *args, **kwargs):
"""
class MongoSessionManager(BaseSessionManager):
def __init__(self, uri):
BaseSessionManager.__init__(self)
self.uri = uri
self.init_connection()
def init_connection(self):
log.sessions.info("Initializing MongoDB session backend using URI '%s'" % self.uri)
connData = MongoSessionManager._parse_mongo_table_uri(self.uri)
mongoSpell = Spell(Atom.agatsuma_mongodb)
self.connection = mongoSpell.connection
self.dbCollection = getattr(mongoSpell, connData[0])
self.db = getattr(self.dbCollection, connData[1])
#self.connection = pymongo.Connection(connData[0], int(connData[1]))
#self.dbSet = self.connection[connData[2]]
#self.db = self.dbSet.sessions
@staticmethod
def _parse_mongo_table_uri(details):
# mongotable://collection/table
match = re.match('^mongotable://(\w+)/(\w+)$', details)
return match.group(1), match.group(2)
def cleanup(self):
self.db.remove({'expires': {'$lte': int(time.time())}})
def destroy_data(self, session_id):
try:
self.db.remove({'session_id': session_id})
self.connection.end_request()
except pymongo.errors.AutoReconnect:
log.sessions.critical("Mongo exception during destroying %s" % session_id)
def load_data(self, session_id):
try:
data = self.db.find_one({'session_id': session_id})
self.connection.end_request()
if data:
return data["data"]
except pymongo.errors.AutoReconnect:
log.sessions.critical("Mongo exception during loading %s" % session_id)
except Exception, e:
log.sessions.critical("Unknown exception during loading: %s" % str(e))
self.connection.end_request()
def save_data(self, session_id, data):
expTime = int(time.mktime(self._session_doomsday(datetime.datetime.now()).timetuple()))
try:
self.db.update(
{'session_id': session_id}, # equality criteria
{'session_id': session_id,
'data': data,
'expires': expTime,
}, # new document
upsert=True)
self.connection.end_request()
except pymongo.errors.AutoReconnect:
log.sessions.critical("Mongo exception during saving %s with data %s" % (session_id, str(data)))
class MongoSessionSpell(AbstractSpell, IInternalSpell, ISessionBackendSpell):
def __init__(self):
config = {'info' : 'MongoDB session storage',
'deps' : (Atom.agatsuma_mongodb, ),
'provides' : (Atom.session_backend, )
}
AbstractSpell.__init__(self, Atom.tornado_session_backend_mongo, config)
def instantiate_backend(self, uri):
self.managerInstance = MongoSessionManager(uri)
return self.managerInstance
def pre_configure(self, core):
core.register_entry_point("mongodb:sessions:cleanup", self.entry_point)
def entry_point(self, *args, **kwargs):
log.core.info("Cleaning old sessions in MongoDB")
        self.managerInstance.cleanup()
# ==== end of agatsuma/web/tornado/spells/standalone/session_backends/mongo_backend.py (Agatsuma) ====
from sys import argv
import numpy as np
import cv2
import os
from keras.models import load_model
# Loading the trained CNN model for age classification, and
# defining a list of age-ranges as defined in the model.
# model = load_model("C:/Users/User/OneDrive/Bubblez/Age Detection/age-detection-backend/Age_detection_files/age_detect_cnn_model.h5")
model = load_model('C:\\Users\\User\\OneDrive\\Circles\\age-detection-backend\\src\\age_detect_cnn_model.h5')
age_ranges = ['1-2', '3-9', '10-20', '21-27', '28-45', '46-65', '66-116']
# Importing the Haar Cascades classifier XML file.
face_cascade = cv2.CascadeClassifier("C:\\Users\\User\\OneDrive\\Circles\\age-detection-backend\\src\\haarcascade_frontalface_default.xml")
# Defining a function to shrink the detected face region by a scale for better prediction in the model.
def shrink_face_roi(x, y, w, h, scale=0.9):
wh_multiplier = (1 - scale) / 2
x_new = int(x + (w * wh_multiplier))
y_new = int(y + (h * wh_multiplier))
w_new = int(w * scale)
h_new = int(h * scale)
return (x_new, y_new, w_new, h_new)
# Defining a function to create the predicted age overlay on the image by centering the text.
def create_age_text(img, text, pct_text, x, y, w, h):
# Defining font, scales and thickness.
fontFace = cv2.FONT_HERSHEY_SIMPLEX
text_scale = 1.2
yrsold_scale = 0.7
pct_text_scale = 0.65
# Getting width, height and baseline of age text and "years old".
(text_width, text_height), text_bsln = cv2.getTextSize(text, fontFace=fontFace, fontScale=text_scale, thickness=2)
(yrsold_width, yrsold_height), yrsold_bsln = cv2.getTextSize("years old", fontFace=fontFace, fontScale=yrsold_scale,
thickness=1)
(pct_text_width, pct_text_height), pct_text_bsln = cv2.getTextSize(pct_text, fontFace=fontFace,
fontScale=pct_text_scale, thickness=1)
# Calculating center point coordinates of text background rectangle.
x_center = x + (w / 2)
y_text_center = y + h + 20
y_yrsold_center = y + h + 48
y_pct_text_center = y + h + 75
# Calculating bottom left corner coordinates of text based on text size and center point of background rectangle
# calculated above.
x_text_org = int(round(x_center - (text_width / 2)))
y_text_org = int(round(y_text_center + (text_height / 2)))
x_yrsold_org = int(round(x_center - (yrsold_width / 2)))
y_yrsold_org = int(round(y_yrsold_center + (yrsold_height / 2)))
x_pct_text_org = int(round(x_center - (pct_text_width / 2)))
y_pct_text_org = int(round(y_pct_text_center + (pct_text_height / 2)))
face_age_background = cv2.rectangle(img, (x - 1, y + h), (x + w + 1, y + h + 94), (0, 100, 0), cv2.FILLED)
face_age_text = cv2.putText(img, text, org=(x_text_org, y_text_org), fontFace=fontFace, fontScale=text_scale,
thickness=2, color=(255, 255, 255), lineType=cv2.LINE_AA)
yrsold_text = cv2.putText(img, "years old", org=(x_yrsold_org, y_yrsold_org), fontFace=fontFace,
fontScale=yrsold_scale, thickness=1, color=(255, 255, 255), lineType=cv2.LINE_AA)
pct_age_text = cv2.putText(img, pct_text, org=(x_pct_text_org, y_pct_text_org), fontFace=fontFace,
fontScale=pct_text_scale, thickness=1, color=(255, 255, 255), lineType=cv2.LINE_AA)
return (face_age_background, face_age_text, yrsold_text)
# Defining a function to find faces in an image and then classify each found face into age-ranges defined above.
def classify_age(img):
# Making a copy of the image for overlay of ages and making a grayscale copy for passing to the loaded model for age classification.
img_copy = np.copy(img)
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Detecting faces in the image using the face_cascade loaded above and storing their coordinates into a list.
faces = face_cascade.detectMultiScale(img_copy, scaleFactor=1.2, minNeighbors=6, minSize=(100, 100))
# print(f"{len(faces)} faces found.")
face_age = -1
# Looping through each face found in the image.
for i, (x, y, w, h) in enumerate(faces):
# Drawing a rectangle around the found face.
face_rect = cv2.rectangle(img_copy, (x, y), (x + w, y + h), (0, 100, 0), thickness=2)
# Predicting the age of the found face using the model loaded above.
x2, y2, w2, h2 = shrink_face_roi(x, y, w, h)
face_roi = img_gray[y2:y2 + h2, x2:x2 + w2]
face_roi = cv2.resize(face_roi, (200, 200))
face_roi = face_roi.reshape(-1, 200, 200, 1)
        # Run the model once and reuse the prediction for both the age label
        # and the confidence percentage.
        predictions = model.predict(face_roi)
        face_age = age_ranges[np.argmax(predictions)]
        face_age_pct = f"({round(np.max(predictions) * 100, 2)}%)"
# Calling the above defined function to create the predicted age overlay on the image.
face_age_background, face_age_text, yrsold_text = create_age_text(img_copy, face_age, face_age_pct, x, y, w, h)
# print(f"Age prediction for face {i+1} : {face_age} years old")
return img_copy, face_age
# Defining a function to return the image filepath with a new filename.
# If INPUT filepath is "my_folder1/my_folder2/my_image.jpg", OUTPUT filepath will be "my_folder1/my_folder2/my_image_WITH_AGE.jpg"
def new_img_name(org_img_path):
img_path, img_name_ext = os.path.split(org_img_path)
img_name, img_ext = os.path.splitext(img_name_ext)
new_img_name_ext = img_name + "_WITH_AGE" + img_ext
new_img_path = os.path.join(img_path, new_img_name_ext)
return new_img_path
# Defining a function to return the video filepath with a new filename.
# If INPUT filepath is "my_folder1/my_folder2/my_video.mp4", OUTPUT filepath will be "my_folder1/my_folder2/my_video_WITH_AGE.mp4"
def new_vid_name(org_vid_path):
vid_path, vid_name_ext = os.path.split(org_vid_path)
vid_name, vid_ext = os.path.splitext(vid_name_ext)
new_vid_name_ext = vid_name + "_WITH_AGE" + ".mp4"
new_vid_path = os.path.join(vid_path, new_vid_name_ext)
return new_vid_path
# Provide the image filepath as a string below.
# For example: "my_image.jpg" or "/content/drive/My Drive/my_folder/my_image.png"
def detect(path_to_img):
my_image = path_to_img
# Reading the image from filepath provided above and passing it through the age clasification method defined above.
img = cv2.imread(my_image)
age_img, face_age = classify_age(img)
# Saving the new generated image with a new name at the same location.
try:
new_my_image = new_img_name(my_image)
cv2.imwrite(new_my_image, age_img)
# print(f"Saved to {new_my_image}")
    except Exception:
print("Error: Could not save image!")
# cv2.imshow("Age Detection on Image", age_img)
# cv2.waitKey(0)
print(face_age)
    return face_age
# ==== end of src/DetectAge.py (Age-detection) ====
# AGEAS
[](https://www.python.org/)
**AGEAS (AutoML-based Genomic fEature extrAction System)** aims to find key genomic factors, including genes and regulatory pathways, that determine cellular phenotype.
# Install
[](http://github.com/slundberg/shap)
[](https://github.com/scipy/scipy)
[](https://github.com/pytorch/pytorch)
[](https://github.com/numpy/numpy)
[](https://github.com/pandas-dev/pandas)
[](https://github.com/dmlc/xgboost)
[](https://github.com/paulbrodersen/netgraph)
[](https://github.com/networkx/networkx)
[](https://github.com/matplotlib/matplotlib)
[](https://github.com/scikit-learn/scikit-learn)
Ageas can be installed from [PyPI](https://pypi.org/project/Ageas/):
<pre>
pip install ageas
</pre>
# Tutorial
Please visit [Documentation Page](https://nkmtmsys.github.io/Ageas/) for detailed API documentations and examples
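
A minimal usage sketch (file paths below are placeholders; see the documentation page above for complete examples):

<pre>
import ageas

result = ageas.Launch(
    class1_path = 'data/class1.csv',
    class2_path = 'data/class2.csv',
)
</pre>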
# Contributors
+ Jack Yu (JackSSK)
+ Masayoshi Nakamoto (nkmtmsys)
# Disclaimer
Further developed from [Odysseia Project](https://www.biorxiv.org/content/10.1101/2022.02.17.480852v1)
<!-- end of README.md (Ageas) -->
import re
import os
import sys
import copy
import time
import threading
import warnings
from pkg_resources import resource_filename
import ageas
import ageas.tool.json as json
import ageas.lib.psgrn_caster as psgrn
import ageas.lib.meta_grn_caster as meta_grn
import ageas.lib.config_maker as config_maker
import ageas.lib.atlas_extractor as extractor
import ageas.database_setup.binary_class as binary_db
GRP_TYPES = ['Standard', 'Outer', 'Bridge', 'Mix']
class Launch:
"""
Main function to launch AGEAS
Args:
class1_path: <str> Default = None
Path to file or folder being considered as type class 1 data
class2_path: <str> Default = None
Path to file or folder being considered as type class 2 data
clf_keep_ratio: <float> Default = 0.5
Portion of classifier model to keep after each model selection
iteration.
.. note::
When performing SHA based model selection, this value is
set as lower bound to keep models
clf_accuracy_thread: <float> Default = 0.8
Filter thread of classifier's accuracy in local test performed at
each model selection iteration
.. note::
When performing SHA based model selection, this value is
only used at last iteration
correlation_thread: <float> Default = 0.2
            Gene expression correlation threshold value for GRPs.
            Potential GRPs failing to reach this value will be dropped
cpu_mode: <bool> Default = False
            Whether to force using CPU only or not
database_path: <str> Default = None
Database header. If specified, class1_path and class2_path will be
rooted here.
database_type: <str> Default = 'gem_files'
Type of data class1_path and class1_path are directing to
Supporting:
'gem_files': Each path is directing to a GEM file.
Pseudo samples will be generated with sliding window algo
'gem_folders': Each path is directing to a GEM folder. Files in
each folder will be used to generate pseudo samples
'mex_folders': Each path is directing to a folder consisting MEX
files(***matrix.mtx***, ***genes.tsv***, ***barcodes.tsv***)
Pseudo samples will be generated with sliding window tech
factor_name_type: <str> Default = 'gene_name'
What type of ID name to use for each gene.
Supporting:
'gene_name': Gene Symbols/Names
'ens_id': Ensembl ID
.. note::
If using BioGRID as interaction database, factor_name_type
must be set to 'gene_name' for now.
# TODO: Find a way to map gene names with Ensembl IDs
feature_dropout_ratio: <float> Default = 0.1
Portion of features(GRPs) to be dropped out after each iteration of
feature selection.
feature_select_iteration: <int> Default = 1
            Number of iterations for feature (GRP) selection before
            key GRP extraction
interaction_database: <str> Default = 'gtrd'
Which interaction database to use for confirming a GRP has a high
possibility to exist.
Supporting:
None: No database will be used. As long as a GRP can pass all
related filters, it's good to go.
'gtrd': Using GTRD as regulatory pathway reference
https://gtrd.biouml.org/
'biogrid': Using BioGRID as regulatory pathway reference
https://thebiogrid.org/
impact_depth: <int> Default = 3
When assessing a TF's regulatory impact on other genes, how far the
distance between TF and potential regulatory source can be.
.. note::
                The stepped correlation strength between the TF
                and the gene still needs to be greater than correlation_thread.
top_grp_amount: <int> Default = 100
Amount of GRPs an AGEAS unit would extract.
.. note::
If outlier_thread is set, since outlier GRPs are extracted
during feature selection part and will also be considered as
key GRPs, actual amount of key GRPs would be greater.
grp_changing_thread: <float> Default = 0.05
            If the changing portion of key GRPs extracted by an AGEAS unit between two
            stabilize iterations is lower than this threshold, these two iterations
            will be considered as having a consistent result.
log2fc_thread: <float> Default = None
            Log2 fold change threshold to filter out non-differentially expressed genes.
            .. note::
                It's generally not encouraged to set up this filter since it can
                result in losing key TFs that do not have great changes in overall
                expression volume but do have changes in expression pattern.
                If local computational power is relatively limited, setting up
                this threshold can help a lot to keep the program runnable.
link_step_allowrance: <int> Default = 1
During key atlas extraction, when finding bridge GRPs to link 2
separate regulons, how many steps will be allowed.
            link_step_allowrance == 1 means no intermediate gene can be used
            and the potential regulatory source must be able to interact with a gene
            from another regulon.
meta_load_path: <str> Default = None
Path to load meta_GRN
meta_save_path: <str> Default = None
Path to save meta_GRN
model_config_path: <str> Default = None
Path to load model config file which will be used to initialize
classifiers
model_select_iteration: <int> Default = 2
            Number of iterations for classification model selection before
the mandatory filter.
mute_unit: <bool> Default = True
            Whether an AGEAS unit prints out logs while running.
.. note::
It's not mandatory but encouraged to remain True especially
when using multi protocol
        mww_p_val_thread: <float> Default = 0.05
            Gene expression Mann–Whitney–Wilcoxon test p-value threshold,
            to make sure a gene's expression profile is not constant among
            different classes.
outlier_thread: <float> Default = 3.0
            The lower bound of the Z-score scaled importance value to consider a GRP
            as an outlier that needs to be retained.
protocol: <str> Default = 'solo'
AGEAS unit launching protocol.
Supporting:
'solo': All units will run separately
'multi': All units will run parallelly by multithreading
patient: <int> Default = 3
            If stabilize iterations continuously have consistent results for
            this many times in a row, an early stop on result stabilization will be executed.
psgrn_load_path: <str> Default = None
Path to load pseudo-sample GRNs.
psgrn_save_path: <str> Default = None
Path to save pseudo-sample GRNs.
prediction_thread: <str> or <float> Default = 'auto'
            The importance threshold for a GRP predicted with a GRNBoost2-like algo
            to be included.
            Supporting:
                'auto': Automatically set the threshold value to the minimum importance
                    value of an interaction-database-recorded GRP of the TF having
                    the most GRPs. If not using an interaction database, it
                    will be set to (1 / amount of genes)
                float type: Value will be set as the threshold directly
report_folder_path: <str> Default = None
Path to create folder for saving AGEAS report files.
save_unit_reports: <bool> Default = False
            Whether to save key GRPs extracted by each AGEAS Unit or not.
If True, reports will be saved in report_folder_path under folders
named 'no_{}'.format(unit_num) starting from 0.
specie: <str> Default = 'mouse'
Specify which sepcie's interaction database shall be used.
Supporting:
'mouse'
'human'
sliding_window_size: <int> Default = 10
Number of samples a pseudo-sample generated with
sliding window technique contains.
sliding_window_stride: <int> Default = None
Stride of sliding window when generating pseudo-samples.
std_value_thread: <float> Default = None
            Set up the gene expression standard deviation threshold by value.
To rule out genes having relatively constant expression in each type
class.
std_ratio_thread: <float> Default = None
            Set up the gene expression standard deviation threshold by portion.
Only genes reaching top portion will be kept in each type class.
stabilize_iteration: <int> Default = 10
            Number of iterations for an AGEAS unit to repeat key GRP extraction
after model and feature selections in order to find key GRPs
consistently being important.
max_train_size: <float> Default = 0.95
            The largest portion of available data that can be used to train models.
At the mandatory model filter, this portion of data will be given to
each model to train.
unit_num: <int> Default = 2
Number of AGEAS units to launch.
warning_filter: <str> Default = 'ignore'
How warnings should be filtered.
For other options, please check 'The Warnings Filter' section in:
https://docs.python.org/3/library/warnings.html#warning-filter
z_score_extract_thread: <float> Default = 0.0
The lower bound of Z-score scaled importance value to extract a GRP.
Inputs: None
Outputs: None
Attributes:
Examples::
>>> easy = ageas.Launch(
class1_path = 'Odysseia/2kTest/ips.csv',
class2_path = 'Odysseia/2kTest/mef.csv',
)
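
        A second sketch (all paths and settings below are placeholders, shown
        only to illustrate the multithreading protocol and report output)::

        >>> launcher = ageas.Launch(
                class1_path = 'data/class1_gem.csv',
                class2_path = 'data/class2_gem.csv',
                protocol = 'multi',
                unit_num = 4,
                report_folder_path = 'ageas_report/',
            )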
"""
def __init__(self,
class1_path:str = None,
class2_path:str = None,
clf_keep_ratio:float = 0.5,
clf_accuracy_thread:float = 0.8,
correlation_thread:float = 0.2,
cpu_mode:bool = False,
database_path:str = None,
database_type:str = 'gem_files',
factor_name_type:str = 'gene_name',
feature_dropout_ratio:float = 0.1,
feature_select_iteration:int = 1,
interaction_database:str = 'gtrd',
impact_depth:int = 3,
top_grp_amount:int = 100,
grp_changing_thread:float = 0.05,
log2fc_thread:float = None,
link_step_allowrance:int = 1,
meta_load_path:str = None,
meta_save_path:str = None,
model_config_path:str= None,
model_select_iteration:int = 2,
                 mww_p_val_thread:float = 0.05,
outlier_thread:float = 3.0,
protocol:str = 'solo',
patient:int = 3,
psgrn_load_path:str = None,
psgrn_save_path:str = None,
prediction_thread = 'auto',
report_folder_path:str = None,
save_unit_reports:bool = False,
specie:str = 'mouse',
sliding_window_size:int = 10,
sliding_window_stride:int = None,
std_value_thread:float = None,
std_ratio_thread:float = None,
stabilize_iteration:int = 10,
max_train_size:float = 0.95,
unit_num:int = 2,
unit_silent:bool = True,
warning_filter:str = 'ignore',
z_score_extract_thread:float = 0.0,
):
super(Launch, self).__init__()
""" Initialization """
print('Launching Ageas')
warnings.filterwarnings(warning_filter)
start = time.time()
self.reports = list()
self.protocol = protocol
self.unit_num = unit_num
self.silent = unit_silent
self.impact_depth = impact_depth
# Get database information
self.database_info = binary_db.Setup(
database_path,
database_type,
class1_path,
class2_path,
specie,
factor_name_type,
interaction_database,
sliding_window_size,
sliding_window_stride
)
# Get model configs
if model_config_path is None:
path = resource_filename(__name__, 'data/config/list_config.js')
self.model_config = config_maker.List_Config_Reader(path)
else:
self.model_config = json.decode(model_config_path)
# Prepare report folder
self.report_folder_path = report_folder_path
if self.report_folder_path is not None:
if self.report_folder_path[-1] != '/':
self.report_folder_path += '/'
if not os.path.exists(self.report_folder_path):
os.makedirs(self.report_folder_path)
self.save_unit_reports = save_unit_reports
if self.save_unit_reports and self.report_folder_path is None:
raise Exception('Report Path must be given to save unit reports!')
print('Time to Boot: ', time.time() - start)
# Make or load psGRNs and meta GRN
start = time.time()
if meta_load_path is not None and psgrn_load_path is not None:
self.meta = meta_grn.Cast(load_path = meta_load_path)
self.pseudo_grns = psgrn.Make(load_path = psgrn_load_path)
else:
self.meta, self.pseudo_grns = self.get_pseudo_grns(
database_info = self.database_info,
std_value_thread = std_value_thread,
std_ratio_thread = std_ratio_thread,
mww_p_val_thread = mww_p_val_thread,
log2fc_thread = log2fc_thread,
prediction_thread = prediction_thread,
correlation_thread = correlation_thread,
meta_load_path = meta_load_path,
)
# Meta GRN Analysis
self.meta_report = meta_grn.Analysis(self.meta.grn)
# Save docs if specified path
if self.report_folder_path is not None:
self.meta_report.save(self.report_folder_path + 'meta_report.csv')
if psgrn_save_path is not None:
self.pseudo_grns.save(psgrn_save_path)
if meta_save_path is not None:
self.meta.grn.save_json(meta_save_path)
print('Time to cast or load Pseudo-Sample GRNs : ', time.time() - start)
print('\nDeck Ready')
start = time.time()
# Initialize a basic unit
self.basic_unit = ageas.Unit(
meta = self.meta,
pseudo_grns = self.pseudo_grns,
model_config = self.model_config,
database_info = self.database_info,
cpu_mode = cpu_mode,
correlation_thread = correlation_thread,
top_grp_amount = top_grp_amount,
z_score_extract_thread = z_score_extract_thread,
max_train_size = max_train_size,
clf_keep_ratio = clf_keep_ratio,
clf_accuracy_thread = clf_accuracy_thread,
model_select_iteration = model_select_iteration,
outlier_thread = outlier_thread,
feature_dropout_ratio = feature_dropout_ratio,
feature_select_iteration = feature_select_iteration,
patient = patient,
grp_changing_thread = grp_changing_thread,
stabilize_iteration = stabilize_iteration,
impact_depth = impact_depth,
link_step_allowrance = link_step_allowrance,
)
self.lockon = threading.Lock()
print('Protocol:', self.protocol)
print('Silent:', self.silent)
# Do everything unit by unit
if self.protocol == 'solo':
self.proto_solo()
# Multithreading protocol
elif self.protocol == 'multi':
self.proto_multi()
self.atlas = self.combine_unit_reports()
print('Operation Time: ', time.time() - start)
if self.report_folder_path is not None:
print('Generating Report Files')
self._save_atlas_as_json(
self.atlas.regulons,
self.report_folder_path + 'key_atlas.js'
)
self.atlas.report(self.meta.grn).to_csv(
self.report_folder_path + 'report.csv',
index = False
)
print('\nComplete\n')
# Protocol SOLO
def proto_solo(self):
for i in range(self.unit_num):
id = 'RN_' + str(i)
new_unit = copy.deepcopy(self.basic_unit)
print('Unit', id, 'Ready')
print('\nSending Unit', id, '\n')
if self.silent: sys.stdout = open(os.devnull, 'w')
new_unit.select_models()
new_unit.launch()
new_unit.generate_regulons()
self.reports.append(new_unit.atlas)
if self.silent: sys.stdout = sys.__stdout__
print(id, 'RTB\n')
# Protocol MULTI
def proto_multi(self):
units = []
for i in range(self.unit_num):
id = 'RN_' + str(i)
units.append(threading.Thread(target=self.multi_unit, name=id))
print('Unit', id, 'Ready')
# Time to work
print('\nSending All Units\n')
if self.silent: sys.stdout = open(os.devnull, 'w')
# Start each unit
for unit in units: unit.start()
# Wait till all thread terminates
for unit in units: unit.join()
if self.silent: sys.stdout = sys.__stdout__
print('Units RTB\n')
    # Model selection and regulon construction parts run in parallel
def multi_unit(self,):
new_unit = copy.deepcopy(self.basic_unit)
new_unit.select_models()
# lock here since SHAP would bring Error
self.lockon.acquire()
new_unit.launch()
self.lockon.release()
new_unit.generate_regulons()
self.reports.append(new_unit.atlas)
del new_unit
# Combine information from reports returned by each unit
def combine_unit_reports(self):
all_grps = dict()
for index, atlas in enumerate(self.reports):
            # save unit report if requested
if self.save_unit_reports:
report_path = self.report_folder_path + 'no_' + str(index) + '/'
if not os.path.exists(report_path): os.makedirs(report_path)
atlas.grps.save(report_path + 'grps_importances.txt')
json.encode(atlas.outlier_grps, report_path+'outlier_grps.js')
for regulon in atlas.regulons.values():
for id, record in regulon.grps.items():
if id not in all_grps:
all_grps[id] = record
elif id in all_grps:
all_grps[id] = self._combine_grp_records(
record_1 = all_grps[id],
record_2 = record
)
# now we build regulons
regulon = extractor.Extract()
for id, grp in all_grps.items():
regulon.update_regulon_with_grp(
grp = grp,
meta_grn = self.meta.grn
)
regulon.find_bridges(meta_grn = self.meta.grn)
regulon.update_genes(impact_depth = self.impact_depth)
regulon.change_regulon_list_to_dict()
return regulon
# get pseudo-cGRNs from GEMs or GRNs
def get_pseudo_grns(self,
database_info = None,
std_value_thread = 100,
std_ratio_thread = None,
mww_p_val_thread = 0.05,
log2fc_thread = 0.1,
prediction_thread = 'auto',
correlation_thread = 0.2,
meta_load_path = None
):
meta = None
# if reading in GEMs, we need to construct pseudo-cGRNs first
# or if we are reading in MEX, make GEM first and then mimic GEM mode
if (re.search(r'gem' , database_info.type) or
re.search(r'mex' , database_info.type)):
gem_data = binary_db.Load_GEM(
database_info,
mww_p_val_thread,
log2fc_thread,
std_value_thread
)
start1 = time.time()
# Let kirke casts GRN construction guidance first
meta = meta_grn.Cast(
gem_data = gem_data,
prediction_thread = prediction_thread,
correlation_thread = correlation_thread,
load_path = meta_load_path
)
print('Time to cast Meta GRN : ', time.time() - start1)
psGRNs = psgrn.Make(
database_info = database_info,
std_value_thread = std_value_thread,
std_ratio_thread = std_ratio_thread,
correlation_thread = correlation_thread,
gem_data = gem_data,
meta_grn = meta.grn
)
# if we are reading in GRNs directly, just process them
        elif re.search(r'grn' , database_info.type):
            psGRNs = None
            print('trainer.py: mode GRN needs to be revised here')
        else:
            raise lib.Error('Unrecognized database type: ', database_info.type)
return meta, psGRNs
    # combine information of the same GRP from different reports
def _combine_grp_records(self, record_1, record_2):
answer = copy.deepcopy(record_1)
if answer.type != record_2.type:
if answer.type == GRP_TYPES[2]:
assert answer.score == 0
if record_2.type != GRP_TYPES[2]:
answer.type = record_2.type
answer.score = record_2.score
else:
if record_2.type != GRP_TYPES[2]:
answer.type = GRP_TYPES[3]
answer.score = max(answer.score, record_2.score)
else:
answer.score = max(answer.score, record_2.score)
return answer
# change class objects to dicts and save regulons in JSON format
def _save_atlas_as_json(self, regulons, path):
        json.encode({k:v.as_dict() for k,v in regulons.items()}, path)
import math
import numpy as np
import networkx as nx
from warnings import warn
import matplotlib as mpl
import matplotlib.cm as cmx
import matplotlib.pyplot as plt
import matplotlib.image as img
import matplotlib.colors as colors
from scipy.special import softmax
from netgraph import Graph
from netgraph import InteractiveGraph
import ageas.tool.grn as grn
import ageas.tool.json as json
TYPES = ['Standard', 'Outer', 'Bridge', 'Mix']
class Plot_Regulon(object):
"""
Visualize full or partial Regulon and save in PDF format by default
"""
def __init__(self,
root_gene:str = None,
weight_thread:float = 0.0,
graph_type:str = 'all',
impact_depth:int = 1,
hide_bridge:bool = True,
file_path:str = None,
regulon_id:str = 'regulon_0',
):
super(Plot_Regulon, self).__init__()
self.root_gene = root_gene
self.graph_type = str(graph_type)
self.hide_bridge = hide_bridge
self.impact_depth = impact_depth
self.weight_thread = weight_thread
# Load in regulon to plot
regulon = grn.GRN(id = regulon_id)
regulon.load_dict(json.decode(file_path)[regulon_id])
# Plot the whole regulon or set a node as root?
if self.root_gene is None:
# add every qualified GRP in given regulon
grps_to_plot = {
k:None for k,v in regulon.grps.items() if self.__check(v.type)
}
else:
            # only keep GRPs reachable from root_gene within the given depth
grps_to_plot = dict()
self.__find_grps(regulon, root_gene, grps_to_plot, impact_depth)
        # get weight for each GRP and filter them by the weight threshold
grps_to_plot = self.__weight_filter(grps_to_plot, regulon)
# now we make the graph
self.graph = regulon.as_digraph(grp_ids = grps_to_plot.keys())
# make sure we have something to play with
if len(self.graph) <= 0: raise Exception('No GRPs in Graph!')
# check type
def __check(self, type): return type != TYPES[2] or not self.hide_bridge
    # filter GRPs based on the weight threshold
def __weight_filter(self, grp_ids, regulon):
answer = {}
for id in grp_ids:
weight = self.get_weight(regulon.grps[id].correlations)
if abs(weight) >= self.weight_thread:
answer[id] = None
regulon.grps[id].weight = weight
return answer
# recursively find GRPs able to link with root_gene in given depth
def __find_grps(self, regulon, gene, dict, depth):
if depth >= 1:
depth -= 1
for tar in regulon.genes[gene].target + regulon.genes[gene].source:
id = grn.GRP(gene, tar).id
# add id to dict accordingly
if id not in dict and self.__check(regulon.grps[id].type):
dict[id] = None
                # call self recursively on the linked gene
self.__find_grps(regulon, tar, dict, depth)
# make sure which GRPs are qualified and plot them
def draw(self,
scale:int = 1,
seed:int = 1936,
node_base_size:int = 2,
figure_size:int = 20,
method:str = 'netgraph',
legend_size:int = 10,
layout:str = 'spring',
colorbar_shrink:float = 0.25,
font_size:int = 10,
hide_target_labels:bool = False,
edge_width_scale:float = 4.0,
save_path:str = None
):
# initialization
self.node_cmap = plt.cm.Set3
self.edge_cmap = plt.cm.coolwarm
# Color mapping
self.edge_scalar_map = cmx.ScalarMappable(
norm = colors.Normalize(vmin = -1, vmax = 1, clip = True),
cmap = self.edge_cmap
)
self.node_scalar_map = cmx.ScalarMappable(
norm = colors.Normalize(vmin = 0, vmax = 1, clip = True),
cmap = self.node_cmap
)
fig = plt.figure(figsize = (figure_size, figure_size))
ax = plt.gca()
ax.set_axis_off()
# draw with specified method
if method == 'networkx':
self.draw_with_networkx(
ax = ax,
scale = scale,
layout = layout,
base_size = node_base_size,
font_size = font_size,
hide_target_labels = hide_target_labels,
edge_width_scale =edge_width_scale,
)
elif method == 'netgraph':
self.draw_with_netgraph(
ax = ax,
scale = scale,
seed = seed,
layout = layout,
base_size = node_base_size,
font_size = font_size,
hide_target_labels = hide_target_labels,
edge_width_scale = edge_width_scale,
)
# add color bar and legend
self.set_legend(
legend_size = legend_size,
method = method
)
self.set_color_bar(
ax = ax,
shrink = colorbar_shrink,
font_size = font_size
)
fig.tight_layout()
if save_path is not None:
self.save(save_path)
# Netgraph plot method
def draw_with_netgraph(self,
ax = plt.gca(),
base_size:int = 2,
scale:int = 1,
seed:int = 1936,
layout:str = 'spring',
font_size:int = 5,
hide_target_labels:bool = False,
edge_width_scale:float = 0.1,
):
node_size, node_color, node_alhpa, node_labels = self.get_node_info(
base_size = base_size,
color_type = 'rgba',
hide_target_labels = hide_target_labels,
)
edge_width, edge_style, edge_color, edge_alpha = self.get_edge_info(
width_scale = edge_width_scale,
color_type = 'rgba',
)
node_shape = {node:'o' for node in self.graph.nodes}
for node, data in self.graph.nodes(data = True):
if data['type'] == 'TF':
node_shape[node] = 'd'
plot = Graph(
graph = self.graph,
node_layout = layout,
node_size = node_size,
node_color = node_color,
node_shape = node_shape,
node_edge_color = node_color,
node_label_fontdict = {'size':font_size},
node_alpha = node_alhpa,
node_labels = node_labels,
edge_width = edge_width,
edge_cmap = self.edge_cmap,
edge_color = edge_color,
edge_alpha = edge_alpha,
arrows = True,
)
# Networkx plot method
def draw_with_networkx(self,
ax = plt.gca(),
base_size:int = 600,
scale:int = 1,
seed:int = 1914,
layout:str = 'spring',
font_size:int = 5,
hide_target_labels:bool = False,
edge_width_scale:float = 1.0,
):
node_size, node_color, node_alhpa, node_labels = self.get_node_info(
base_size = base_size,
hide_target_labels = hide_target_labels,
)
edge_width, edge_style, edge_color, edge_alpha = self.get_edge_info(
width_scale = edge_width_scale,
)
node_size = [node_size[node] for node in self.graph.nodes]
node_color = [node_color[node] for node in self.graph.nodes]
edge_width = [edge_width[(u,v)] for (u,v) in self.graph.edges]
edge_style = [edge_style[(u,v)] for (u,v) in self.graph.edges]
edge_color = [edge_color[(u,v)] for (u,v) in self.graph.edges]
edge_alpha = [edge_alpha[(u,v)] for (u,v) in self.graph.edges]
# specify layout
if layout == 'circular':
pos = nx.circular_layout(self.graph, scale = scale,)
elif layout == 'spring':
pos = nx.spring_layout(
self.graph,
scale = scale,
seed = seed,
k = max(node_size),
)
        elif layout == 'random':
pos = nx.random_layout(self.graph, seed = seed)
elif layout == 'kamada_kawai':
pos = nx.kamada_kawai_layout(self.graph)
elif layout == 'graphviz':
pos = nx.nx_pydot.graphviz_layout(self.graph)
elif layout == 'planar':
pos = nx.planar_layout(self.graph)
elif layout == 'spectral':
pos = nx.spectral_layout(self.graph)
# Draw Nodes, Labels, and Edges
nodes = nx.draw_networkx_nodes(
G = self.graph,
pos = pos,
cmap = self.node_cmap,
node_size = node_size,
node_color = node_color,
alpha = node_alhpa,
)
labels = nx.draw_networkx_labels(
G = self.graph,
pos = pos,
labels = node_labels,
font_size = font_size,
clip_on = True,
)
edges = nx.draw_networkx_edges(
G = self.graph,
pos = pos,
node_size = node_size,
arrowstyle = "-|>",
arrowsize = 20,
edge_color = edge_color,
edge_cmap = self.edge_cmap,
width = edge_width,
style = edge_style,
)
# set alpha value for each edge
for i in range(self.graph.number_of_edges()):
edges[i].set_alpha(edge_alpha[i])
# Get Edge Information
def get_edge_info(self,
width_scale = 1,
color_type = 'int',
return_type = 'dict'
):
edge_width = dict()
edge_style = dict()
edge_color = dict()
edge_alpha = dict()
for (source, target, data) in self.graph.edges(data = True):
key = (source, target)
# set edge width
edge_width[key] = abs(data['weight']) * width_scale
# set info by type
if data['type'] == TYPES[2]:
edge_style[key] = ':'
else:
edge_style[key] = '-'
# set up alpha value
edge_alpha[key] = min(1, abs(data['weight']))
# set color
if data['weight'] < 0:
edge_color[key] = -1
else:
edge_color[key] = 1
# change color to rgba format if specified
if color_type == 'rgba':
edge_color[key] = self.edge_scalar_map.to_rgba(edge_color[key])
return edge_width, edge_style, edge_color, edge_alpha
# Get Node information
def get_node_info(self,
base_size = 800,
hide_target_labels = False,
color_type = 'int',
return_type = 'dict'
):
node_size = dict()
node_color = dict()
node_alhpa = 0.8
node_labels = {n:n for n in self.graph}
for node, data in self.graph.nodes(data = True):
factor = 1
# target_num = len([i for i in self.graph.successors(node)])
target_num = len(data['target'])
if target_num > 0:
node_color[node] = 1
# increase node size according to gene's reg power
if target_num > 10:
factor = math.log10(target_num) * 2
else:
node_color[node] = 0
if hide_target_labels:
del node_labels[node]
# change color to rgba format if specified
if color_type == 'rgba':
node_color[node]=self.node_scalar_map.to_rgba(node_color[node])
size = base_size * factor
node_size[node] = size
return node_size, node_color, node_alhpa, node_labels
# just as named
def get_weight(self, correlations):
if self.graph_type == 'class1' or self.graph_type == '1':
weight = correlations['class1']
elif self.graph_type == 'class2' or self.graph_type == '2':
weight = correlations['class2']
elif self.graph_type == 'all':
weight = abs(correlations['class1']) - abs(correlations['class2'])
return weight
# Set up a color bar with fixed scale from -1.0 to 1.0
def set_color_bar(self, ax, shrink = 1, font_size = 10):
cbar = plt.colorbar(self.edge_scalar_map, ax = ax, shrink = shrink)
if self.graph_type == 'all':
labels = cbar.ax.get_yticklabels()
labels[0] = 'Stronger in Class2'
# only set mid tick label when it's odd length
if (len(labels) % 2) == 1:
labels[int(len(labels) / 2)] = 'No Difference'
labels[-1] = 'Stronger in Class1'
cbar.set_ticklabels(labels)
cbar.ax.tick_params(labelsize = font_size)
cbar.ax.set_ylabel(
'Gene Expression Correlation',
fontsize = font_size,
fontweight = 'bold',
labelpad = 12.0,
rotation = 270
)
# Set Up Legend
def set_legend(self, legend_size, method):
plt.scatter(
[],[],
s = legend_size * 10,
marker = 'o',
c = [self.node_cmap(0)],
label = 'Regulatory Target'
)
plt.scatter(
[],[],
s = legend_size * 10,
marker = 'o',
c = [self.node_cmap(1)],
label = 'Regulatory Source'
)
if method == 'netgraph':
plt.scatter(
[],[],
s = legend_size * 10,
marker = 'd',
c = ['black'],
label = 'TF'
)
plt.scatter(
[],[],
s = legend_size * 10,
marker = 'o',
c = ['black'],
label = 'Gene'
)
elif method == 'networkx':
plt.plot(
[], [],
linestyle = 'dashed',
c = 'black',
label = 'Bridge GRP'
)
plt.plot(
[], [],
linestyle = 'solid',
c = 'black',
label = 'Key GRP'
)
plt.legend(loc = 1, prop = {'size': legend_size})
# save the plot. PDF by default
def save(self, path:str = None, format:str = 'pdf'):
plt.savefig(path, format = format)
plt.close()
# show the interactive graph
def show(self):
plt.show()
if __name__ == '__main__':
header = 'liverCCl4/hsc_pf_a6w/'
for i in range(1):
folder_path = header + 'run_' + str(i) + '/'
atlas_path = folder_path + 'key_atlas.js'
a = Plot_Regulon(
file_path = atlas_path,
regulon_id = 'regulon_0',
hide_bridge = False,
graph_type = 'class2',
root_gene = 'HAND2',
# impact_depth = 1,
)
        a.draw(save_path = folder_path + 'ultimate_useless_mess.pdf',)
import time
import ageas.lib.clf_trainer as trainer
import ageas.lib.clf_interpreter as interpreter
import ageas.lib.atlas_extractor as extractor
class Unit:
"""
Get candidate key factors and pathways
and write report files into given folder
"""
def __init__(self,
# Processed in Launch Initialization
database_info = None,
meta = None,
model_config = None,
pseudo_grns = None,
# Parameters
cpu_mode:bool = False,
clf_keep_ratio:float = 0.5,
clf_accuracy_thread:float = 0.8,
correlation_thread:float = 0.0,
feature_dropout_ratio:float = 0.1,
feature_select_iteration:int = 1,
grp_changing_thread:float = 0.05,
impact_depth:int = 3,
link_step_allowrance:int = 0,
max_train_size:float = 0.95,
model_select_iteration:int = 2,
outlier_thread:float = 3.0,
patient:int = 3,
stabilize_iteration:int = 10,
top_grp_amount:int = 100,
z_score_extract_thread:float = 0.0,
):
super(Unit, self).__init__()
""" Initialization """
self.far_out_grps = {}
self.no_change_iteration_num = 0
self.meta = meta
self.pseudo_grns = pseudo_grns
self.model_config = model_config
self.database_info = database_info
self.cpu_mode = cpu_mode
self.correlation_thread = correlation_thread
self.top_grp_amount = top_grp_amount
self.z_score_extract_thread = z_score_extract_thread
self.max_train_size = max_train_size
self.clf_keep_ratio = clf_keep_ratio
self.clf_accuracy_thread = clf_accuracy_thread
self.model_select_iteration = model_select_iteration
self.outlier_thread = outlier_thread
self.feature_dropout_ratio = feature_dropout_ratio
self.feature_select_iteration = feature_select_iteration
self.patient = patient
self.grp_changing_thread = grp_changing_thread
self.stabilize_iteration = stabilize_iteration
self.impact_depth = impact_depth
self.link_step_allowrance = link_step_allowrance
# Select Classification models for later interpretations
def select_models(self,):
print('\nEntering Model Selection')
start = time.time()
# initialize trainer
self.clf = trainer.Train(
psGRNs = self.pseudo_grns,
cpu_mode = self.cpu_mode,
database_info = self.database_info,
model_config = self.model_config,
)
# start model selection
self.clf.successive_pruning(
iteration = self.model_select_iteration,
clf_keep_ratio = self.clf_keep_ratio,
clf_accuracy_thread = self.clf_accuracy_thread,
last_train_size = self.max_train_size
)
print('Finished Model Selection', time.time() - start)
def launch(self,):
start = time.time()
self.grp_importances = interpreter.Interpret(self.clf)
self.atlas = extractor.Extract(
self.correlation_thread,
self.grp_importances,
self.z_score_extract_thread,
self.far_out_grps,
self.top_grp_amount
)
print('Time to interpret 1st Gen classifiers : ', time.time() - start)
""" Feature Selection """
if (self.feature_select_iteration is not None and
self.feature_select_iteration > 0):
print('\nEntering Feature Selection')
for i in range(self.feature_select_iteration):
start = time.time()
prev_grps = self.atlas.top_grps.index
rm = self.__get_grp_remove_list(
self.grp_importances.result,
self.feature_dropout_ratio,
self.outlier_thread
)
self.pseudo_grns.update_with_remove_list(rm)
self.clf.clear_data()
self.clf.grns = self.pseudo_grns
self.clf.general_process(
train_size = self.max_train_size,
clf_keep_ratio = self.clf_keep_ratio,
clf_accuracy_thread = self.clf_accuracy_thread
)
self.grp_importances = interpreter.Interpret(self.clf)
self.atlas = extractor.Extract(
self.correlation_thread,
self.grp_importances,
self.z_score_extract_thread,
self.far_out_grps,
self.top_grp_amount
)
print('Time to do a feature selection : ', time.time() - start)
if self.__early_stop(prev_grps, self.atlas.top_grps.index):
self.stabilize_iteration = None
break
print('Total Length of Outlier GRPs is:', len(self.far_out_grps))
""" Stabilizing Key GRPs """
if (self.stabilize_iteration is not None and
self.stabilize_iteration > 0):
print('\nStabilizing Key GRPs')
start = time.time()
denominator = 1
for i in range(self.stabilize_iteration):
denominator += i
prev_grps = self.atlas.top_grps.index
self.clf.general_process(
train_size = self.max_train_size,
clf_keep_ratio = self.clf_keep_ratio,
clf_accuracy_thread = self.clf_accuracy_thread
)
self.grp_importances.add(interpreter.Interpret(self.clf).result)
self.atlas = extractor.Extract(
self.correlation_thread,
self.grp_importances,
self.z_score_extract_thread,
self.far_out_grps,
self.top_grp_amount
)
if self.__early_stop(prev_grps, self.atlas.top_grps.index):
break
self.grp_importances.divide(denominator)
self.atlas = extractor.Extract(
self.correlation_thread,
self.grp_importances,
self.z_score_extract_thread,
self.far_out_grps,
self.top_grp_amount
)
print('Time to stabilize key GRPs : ', time.time() - start)
del self.grp_importances
# Construct Regulons with Extracted GRPs and Access Them
def generate_regulons(self,):
print('\nBuilding Regulons with key GRPs')
start = time.time()
self.atlas.build_regulon(
meta_grn = self.meta.grn,
impact_depth = self.impact_depth
)
# Attempting to Connect Regulons if necessary
if (self.link_step_allowrance is not None and
self.link_step_allowrance > 0 and
len(self.atlas.regulons) > 1):
self.atlas.link_regulon(
meta_grn = self.meta.grn,
allowrance = self.link_step_allowrance
)
self.atlas.change_regulon_list_to_dict()
print('Time to build key regulons : ', time.time() - start)
# take out some GRPs based on feature dropout ratio
def __get_grp_remove_list(self,
df = None,
feature_dropout_ratio = 0.2,
outlier_thread = 3
):
total_grp = len(df.index)
gate_index = int(total_grp * (1 - feature_dropout_ratio))
remove_list = list(df.index[gate_index:])
for ele in self.__get_outliers(df, outlier_thread):
self.far_out_grps[ele[0]] = ele[1]
remove_list.append(ele[0])
return remove_list
    # get outliers based on IQR value and outlier threshold
def __get_outliers(self, df, outlier_thread):
        # not using outlier filter if threshold set to None
if outlier_thread is None: return []
result = []
q3_value = df.iloc[int(len(df.index) * 0.25)]['importance']
q1_value = df.iloc[int(len(df.index) * 0.75)]['importance']
# set far out thread according to interquartile_range (IQR)
far_out_thread = 3 * (q3_value - q1_value)
# remove outliers as well
prev_score = outlier_thread * 3
for i in range(len(df.index)):
score = df.iloc[i]['importance']
if score >= max(far_out_thread, (prev_score / 3), outlier_thread):
result.append([df.index[i], score])
prev_score = score
else: break
return result
# Stop iteration if key genes are not really changing
def __early_stop(self, prev_grps = None, cur_grps = None):
# just keep going if patient not set
if self.patient is None: return False
common = len(list(set(prev_grps).intersection(set(cur_grps))))
change1 = (len(prev_grps) - common) / len(prev_grps)
change2 = (len(cur_grps) - common) / len(cur_grps)
change = (change1 + change2) / 2
print('Average Key GRPs Changing Portion:', change)
if change <= self.grp_changing_thread:
self.no_change_iteration_num += 1
if self.no_change_iteration_num == self.patient:
print('Run out of patient! Early stopping!')
return True
else: return False
else:
self.no_change_iteration_num = 0
            return False
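
# A minimal driver sketch grounded in how Launch.proto_solo uses this class.
# It assumes `basic_unit` was already constructed the way ageas.Launch builds
# its basic unit (with meta GRN, pseudo-sample GRNs, model config and database
# info); run_unit_once is a hypothetical helper, not part of the AGEAS API.
import copy

def run_unit_once(basic_unit):
    unit = copy.deepcopy(basic_unit)  # each unit works on an independent copy
    unit.select_models()              # classification model selection
    unit.launch()                     # interpretation, feature selection, stabilization
    unit.generate_regulons()          # build regulons from the extracted key GRPs
    return unit.atlas                 # the per-unit report consumed by Launch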
import os
import re
import math
import numpy as np
import pandas as pd
import ageas.lib as lib
import ageas.tool as tool
import ageas.tool.gem as gem
import ageas.tool.mex as mex
import ageas.tool.json as json
import ageas.tool.gtrd as gtrd
import ageas.tool.biogrid as biogrid
import ageas.tool.transfac as transfac
import ageas.database_setup as db_setup
from ageas.lib.deg_finder import Find
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
class Setup:
"""
Storage database related setting variables
"""
def __init__(self,
database_path = None,
database_type = 'gem_files',
class1_path = 'CT1',
class2_path = 'CT2',
specie = 'mouse',
factor_name_type = 'gene_name',
interaction_db = 'biogrid',
sliding_window_size = None,
sliding_window_stride = None
):
super(Setup, self).__init__()
# Auto GEM folder finder
if database_path is None:
assert os.path.exists(class1_path) and os.path.exists(class2_path)
elif class1_path is None or class2_path is None:
if len(os.listdir(database_path)) != 2:
                raise db_setup.Error('Please specify classes for binary clf')
else:
class1_path = os.listdir(database_path)[0]
class2_path = os.listdir(database_path)[1]
# Initialization
self.db_path = database_path
self.type = database_type
self.specie = specie
self.factor_name_type = factor_name_type
self.interaction_db = interaction_db
self.sliding_window_size = sliding_window_size
self.sliding_window_stride = sliding_window_stride
        # Get each class's corresponding folder path
self.class1_path = self.__cast_path(class1_path)
self.class2_path = self.__cast_path(class2_path)
# Perform label encoding
self.label_transformer = Label_Encode(class1_path, class2_path)
self.label1 = self.label_transformer.get_label1()
self.label2 = self.label_transformer.get_label2()
# make path str for the input class based on data path and folder name
def __cast_path(self, path):
# no need to concat if path is already completed
if self.db_path is None:
return path
elif path[0] == '/':
return self.db_path + path
else:
return self.db_path + '/' + path
class Label_Encode:
"""
Transform labels into ints
"""
def __init__(self, class1_path, class2_path):
super(Label_Encode, self).__init__()
# Initialization
self.encoder = LabelEncoder().fit([class1_path, class2_path])
self.transformed_labels = self.encoder.transform(
[class1_path, class2_path]
)
# Perform inverse_transform
def getOriginLable(self, query):
return list(self.encoder.inverse_transform(query))
# As named
def get_label1(self): return self.transformed_labels[0]
def get_label2(self): return self.transformed_labels[1]
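
# A tiny behavior sketch (both paths are hypothetical, and _label_encode_demo
# is illustrative only): sklearn's LabelEncoder sorts the two class paths, so
# the integer labels follow the lexicographic order of the paths.
def _label_encode_demo():
    enc = Label_Encode('data/ips_gem.csv', 'data/mef_gem.csv')
    return enc.get_label1(), enc.get_label2()   # -> (0, 1) for this ordering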
class Load_GEM:
"""
Load in GEM data sets
"""
def __init__(self,
database_info,
mww_thread = 0.05,
log2fc_thread = 0.1,
std_value_thread = 100,
std_ratio_thread = None
):
super(Load_GEM, self).__init__()
# Initialization
self.database_info = database_info
# Load TF databases based on specie
specie = db_setup.get_specie_path(__name__, self.database_info.specie)
# Load TRANSFAC databases
self.tf_list = transfac.Reader(
specie + 'Tranfac201803_MotifTFsF.txt',
self.database_info.factor_name_type
).tfs
# Load interaction database
if self.database_info.interaction_db == 'gtrd':
self.interactions = gtrd.Processor(
specie,
self.database_info.factor_name_type,
path = 'wholeGene.js.gz'
)
elif self.database_info.interaction_db == 'biogrid':
assert self.database_info.factor_name_type == 'gene_name'
self.interactions = biogrid.Processor(specie_path = specie)
# process file or folder based on database type
if self.database_info.type == 'gem_folders':
class1, class2 = self.__process_gem_folder(
std_value_thread,
std_ratio_thread
)
elif self.database_info.type == 'gem_files':
class1, class2 = self.__process_gem_file(
std_value_thread,
std_ratio_thread
)
elif self.database_info.type == 'mex_folders':
class1, class2 = self.__process_mex_folders(
std_value_thread,
std_ratio_thread
)
        # Distribution filter if threshold is specified
if mww_thread is not None or log2fc_thread is not None:
self.genes = Find(
class1,
class2,
mww_thread = mww_thread,
log2fc_thread = log2fc_thread
).degs
self.class1 = class1.loc[class1.index.intersection(self.genes)]
self.class2 = class2.loc[class2.index.intersection(self.genes)]
else:
self.genes = class1.index.union(class2.index)
self.class1 = class1
self.class2 = class2
# Process in expression matrix file (dataframe) scenario
def __process_gem_file(self, std_value_thread, std_ratio_thread):
class1 = self.__read_df(
self.database_info.class1_path,
std_value_thread,
std_ratio_thread
)
class2 = self.__read_df(
self.database_info.class2_path,
std_value_thread,
std_ratio_thread
)
return class1, class2
# Read in gem file
def __read_df(self, path, std_value_thread, std_ratio_thread):
        # Decide which separation mark to use
if re.search(r'csv', path): sep = ','
elif re.search(r'txt', path): sep = '\t'
else: raise lib.Error('Unsupported File Type: ', path)
# Decide which compression method to use
if re.search(r'.gz', path): compression = 'gzip'
else: compression = 'infer'
df = pd.read_csv(
path,
sep = sep,
compression = compression,
header = 0,
index_col = 0
)
return tool.STD_Filter(df, std_value_thread, std_ratio_thread)
    # Process in MEX folders scenario
def __process_mex_folders(self, std_value_thread, std_ratio_thread):
class1 = self.__read_mex(
self.database_info.class1_path,
std_value_thread,
std_ratio_thread
)
class2 = self.__read_mex(
self.database_info.class2_path,
std_value_thread,
std_ratio_thread
)
return class1, class2
    # Read in MEX files
def __read_mex(self, path, std_value_thread, std_ratio_thread):
keyword = None
matrix_path = None
features_path = None
barcodes_path = None
# get gene id type
if self.database_info.factor_name_type == 'gene_name':
gene_id_type = 1
else:
print('Under Construction')
# check whether input path contains keyword
if path[-1] != '/':
ele = path.split('/')
keyword = ele[-1]
path = ''.join(ele[:-1])
print('Under Construction')
print(path, keyword)
for ele in os.listdir(path):
# skip files without keyword
if keyword is not None and not re.search(keyword, ele): continue
# get matrix path
if re.search(r'matrix.mtx', ele):
matrix_path = path + ele
# get feature path
elif re.search(r'genes.tsv', ele):
features_path = path + ele
# get barcodes path
elif re.search(r'barcodes.tsv', ele):
barcodes_path = path + ele
assert matrix_path is not None
assert features_path is not None
assert barcodes_path is not None
gem = mex.Reader(
matrix_path = matrix_path,
features_path = features_path,
barcodes_path = barcodes_path
)
gem.get_gem(gene_id_type = gene_id_type)
return tool.STD_Filter(gem.data, std_value_thread, std_ratio_thread)
# Process in Database scenario
def __process_gem_folder(self, std_value_thread, std_ratio_thread):
class1 = gem.Folder(self.database_info.class1_path).combine(
std_value_thread = std_value_thread,
std_ratio_thread = std_ratio_thread
)
class2 = gem.Folder(self.database_info.class2_path).combine(
std_value_thread = std_value_thread,
std_ratio_thread = std_ratio_thread
)
return class1, class2
class Process(object):
"""
    Separate sample gene expression data into training and testing sets
    by a given ratio,
    then prepare sample data for the training and analysis process
"""
def __init__(self,
database_info = None,
grnData = None,
train_size = 0.7,
ramdom_state = None,
fullData = None,
fullLabel = None,
):
super(Process, self).__init__()
# Initialization
self.train_size = train_size
self.random_state = ramdom_state
self.all_grp_ids = {}
# Go through database_info based protocol
if fullData is None or fullLabel is None:
self.__init_protocol(database_info, grnData)
elif fullData is not None and fullLabel is not None:
self.__iterating_protocool(fullData.to_numpy(), fullLabel)
else:
            raise db_setup.Error('Preprocessor Error: case not caught')
# Process in database mode
def __init_protocol(self, database_info, grnData):
        # class1Result is [dataTrainC1, dataTestC1, labelTrainC1, labelTestC1]
class1Result = self.__split_train_test(
grnData.class1_psGRNs,
database_info.label1
)
# similar with class1
class2Result = self.__split_train_test(
grnData.class2_psGRNs,
database_info.label2
)
self.labelTrain = np.array(class1Result[2] + class2Result[2])
self.labelTest = np.array(class1Result[3] + class2Result[3])
self.dataTrain = []
self.dataTest = []
# standardize feature data
# to make sure all training and testing samples
        # will be in the same dimension
self.__update_train_test(
grnData.class1_psGRNs,
train_set = class1Result[0],
test_set = class1Result[1]
)
self.__update_train_test(
grnData.class2_psGRNs,
train_set = class2Result[0],
test_set = class2Result[1]
)
# Add zeros for position holding
self.__append_zeros(self.dataTrain)
self.__append_zeros(self.dataTest)
# self.__all_grp_id_check()
# Clear unnecessary data
del grnData
del database_info
# Update training and testing set based on given expression data
def __update_train_test(self, grns, train_set, test_set):
for sample in grns:
grps = grns[sample].grps
grps_ids_copy = {ele:None for ele in grps}
values = ''
for id in self.all_grp_ids:
if id in grps:
values += str(list(grps[id].correlations.values())[0]) + ';'
# Update grn_cop if id already in allIDs
del grps_ids_copy[id]
else: values += '0.0;'
for id in grps_ids_copy:
self.all_grp_ids[id] = None
values += str(list(grps[id].correlations.values())[0]) + ';'
# Change every elements into float type
values = list(map(float, values.split(';')[:-1]))
if sample in train_set:
self.dataTrain.append(values)
elif sample in test_set:
self.dataTest.append(values)
    # Make training/testing data and label arrays based on given full data
def __iterating_protocool(self, fullData, fullLabel):
data = train_test_split(
fullData,
fullLabel,
train_size = self.train_size,
random_state = self.random_state
)
self.dataTrain = data[0]
self.dataTest = data[1]
self.labelTrain = data[2]
self.labelTest = data[3]
    # separate files in given path into training
# and testing sets based on ratio
def __split_train_test(self, grnData, label):
data_sample = []
label_sample = []
for sample in grnData:
data_sample.append(sample)
label_sample.append(label)
return train_test_split(
data_sample,
label_sample,
train_size = self.train_size,
random_state = self.random_state
)
    # add zeros to each preprocessed sample
    # to make sure all training and testing samples
    # will be in the same dimension
def __append_zeros(self, dataset):
for sample in dataset:
if len(sample) != len(self.all_grp_ids):
sample += [0] * (len(self.all_grp_ids) - len(sample))
sample = np.array(sample)
dataset = np.array(dataset)
    # Check whether allIDs have duplicate IDs or not
# ToDo: need optimization to get unique ids
def __all_grp_id_check(self):
# check whether allIDs have duplicate elements
unique = [x for i, x in enumerate([*self.all_grp_ids])
if i == [*self.all_grp_ids].index(x)]
if len(self.all_grp_ids) != len(unique):
raise db_setup.Error(
'preprocessor malfunction: duplicate ID in allIDs'
)
del unique
# Calculate a close-to-square matrix size based on allIDs
    # for using 2D-input based machine learning models (e.g. hybrid_model)
def auto_inject_fake_grps(self):
total = len(self.all_grp_ids)
tar = math.sqrt(total)
if int(tar) == tar: return
elif int(tar) < tar:
aim = int(tar) + 1
fakeID = 'FAKE'
fakeID_Num = aim*aim - total
for i in range(fakeID_Num):
id = fakeID + str(i)
self.all_grp_ids[id] = None
self.__append_zeros(self.dataTrain)
            self.__append_zeros(self.dataTest)
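
# A quick sketch of the square-padding rule used by auto_inject_fake_grps
# above (numbers are illustrative, and _fake_grp_padding is not part of the
# AGEAS API): with 10 real GRP features the nearest not-smaller square is
# 4 x 4 = 16, so six 'FAKE' zero-valued features get appended before
# reshaping for 2D-input models.
def _fake_grp_padding(total_grps):
    import math
    side = math.ceil(math.sqrt(total_grps))
    return side * side - total_grps   # e.g. _fake_grp_padding(10) -> 6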
import re
import networkx as nx
import statistics as sta
import ageas.tool as tool
import ageas.tool.json as json
class GRN(object):
"""
    docstring for GRN.
"""
def __init__(self, id = None, **kwargs):
super(GRN, self).__init__()
self.id = id
self.genes = dict()
self.grps = dict()
for key in kwargs: setattr(self, key, kwargs[key])
def update_grn(self, source, target, gem1, gem2, correlation_thread):
# Skip if processing self-regulating pathway
if source == target: return
grp_id = GRP(source, target).id
if grp_id in self.grps:
if not self.grps[grp_id].reversable:
self.grps[grp_id].reversable = True
return
# Test out global scale correlation
cor_class1 = None
cor_class2 = None
passed = False
# check cor_class1
if source in gem1.index and target in gem1.index:
cor_class1 = tool.Get_Pearson(
gem1.loc[[source]].values[0],
gem1.loc[[target]].values[0]
)
# check cor_class2
if source in gem2.index and target in gem2.index:
cor_class2 = tool.Get_Pearson(
gem2.loc[[source]].values[0],
gem2.loc[[target]].values[0]
)
        # Go through abs(correlation) threshold check
if cor_class1 is None and cor_class2 is None:
return
if cor_class1 is None and abs(cor_class2) > correlation_thread:
passed = True
cor_class1 = 0
elif cor_class2 is None and abs(cor_class1) > correlation_thread:
passed = True
cor_class2 = 0
elif cor_class1 is not None and cor_class2 is not None:
if abs(cor_class1 - cor_class2) > correlation_thread:
passed = True
# update GRN if passed correlation filter
if passed:
correlations = {
'class1':float(cor_class1),
'class2':float(cor_class2)
}
self.add_grp(grp_id, source, target, correlations)
if source not in self.genes:
self.add_gene(source, gem1, gem2)
if target not in self.genes:
self.add_gene(target, gem1, gem2)
def add_grp(self, id, source, target, correlations):
assert id not in self.grps
# may change it to a class later
self.grps[id] = GRP(
id = id,
regulatory_source = source,
regulatory_target = target,
correlations = correlations
)
return
def add_gene(self, id, gem1, gem2):
assert id not in self.genes
# get expression mean values
if id in gem1.index: class1_exp = sta.mean(gem1.loc[[id]].values[0])
else: class1_exp = 0
if id in gem2.index: class2_exp = sta.mean(gem2.loc[[id]].values[0])
else: class2_exp = 0
# may change it to a class later
self.genes[id] = Gene(
id = id,
expression_mean = {
'class1': float(class1_exp),
'class2': float(class2_exp)
}
)
return
def as_dict(self):
return {
'genes': {id:record.as_dict() for id, record in self.genes.items()},
'grps':{id:record.as_dict() for id, record in self.grps.items()}
}
def as_digraph(self, grp_ids = None):
graph = nx.DiGraph()
# Use all GRPs if not further specified
if grp_ids is None: grp_ids = self.grps.keys()
for grp_id in grp_ids:
source = self.grps[grp_id].regulatory_source
target = self.grps[grp_id].regulatory_target
# add regulatory source and target genes to nodes
if not graph.has_node(source):
graph.add_node(source, **self.genes[source].as_dict())
if not graph.has_node(target):
graph.add_node(target, **self.genes[target].as_dict())
# add GRP as an edge
graph.add_edge(source, target, **self.grps[grp_id].as_dict())
if self.grps[grp_id].reversable:
graph.add_edge(target, source, **self.grps[grp_id].as_dict())
return graph
def save_json(self, path):
json.encode(self.as_dict(), path)
return
def load_dict(self, dict):
self.genes = {id: Gene(**dict['genes'][id]) for id in dict['genes']}
self.grps = {id: GRP(**dict['grps'][id]) for id in dict['grps']}
return
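
# A minimal construction sketch (gene and GRP names are made up, and
# _tiny_grn_demo is illustrative only): build a two-gene GRN by hand and
# export it as a networkx DiGraph whose nodes carry gene attributes and whose
# edges carry GRP attributes.
def _tiny_grn_demo():
    net = GRN(id = 'demo')
    net.genes['TF_A'] = Gene(
        id = 'TF_A', expression_mean = {'class1': 1.0, 'class2': 2.0}
    )
    net.genes['GENE_B'] = Gene(
        id = 'GENE_B', expression_mean = {'class1': 3.0, 'class2': 1.0}
    )
    net.add_grp(
        'TF_A_GENE_B', 'TF_A', 'GENE_B', {'class1': 0.8, 'class2': -0.1}
    )
    return net.as_digraph()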
class Gene(object):
"""
docstring for Gene.
"""
def __init__(self,
id = None,
type = 'Gene',
expression_mean = None,
**kwargs
):
super(Gene, self).__init__()
self.id = id
self.type = type
self.source = list()
self.target = list()
self.expression_mean = expression_mean
for key in kwargs: setattr(self, key, kwargs[key])
def as_dict(self): return self.__dict__
def add_name(self, name):
if not hasattr(self, 'names'): self.names = list()
self.names.append(name)
def add_ens_id(self, ens_id):
if not hasattr(self, 'ens_ids'): self.ens_ids = list()
self.ens_ids.append(ens_id)
def add_uniprot_id(self, uniprot_id):
if not hasattr(self, 'uniprot_ids'): self.uniprot_ids = list()
self.uniprot_ids.append(uniprot_id)
class GRP(object):
"""
docstring for GRP.
"""
def __init__(self,
regulatory_source = None,
regulatory_target = None,
id = None,
type = None,
score = None,
reversable = False,
correlations = None,
**kwargs
):
super(GRP, self).__init__()
self.id = id
self.type = type
self.score = score
self.reversable = reversable
self.correlations = correlations
self.regulatory_source = regulatory_source
self.regulatory_target = regulatory_target
if self.id is None:
self.id = self.cast_id(regulatory_source, regulatory_target)
for key in kwargs: setattr(self, key, kwargs[key])
def as_dict(self): return self.__dict__
def cast_id(self, source, target):
if source > target: return source + '_' + target
else: return target + '_' + source
class Reader(tool.Reader_Template):
"""
NOTE:! Very outdated !
NOTE:! Very outdated !
NOTE:! Very outdated !
NOTE:! Don't Use !
NOTE:! Don't Use !
NOTE:! Don't Use !
Class to read in scRNA-seq or bulk RNA-seq based Gene Expression Matrices
    Only supports .csv and .txt for now
"""
def __init__(self, filename, skipFirst = False, stdevThread = None):
super(Reader, self).__init__()
# Initialization
self.load(filename)
self.entryCoords = {}
self.iteration = 0
# Determine file type
if re.search(r'\.txt', self.filePath): self.split = '\t'
elif re.search(r'\.csv', self.filePath): self.split = ','
else: raise tool.Error(self.filePath, ' is not supported format')
# Skip first line
if skipFirst: line = self.file.readline()
# Iterate through all lines
while(True):
coordinate = self.file.tell()
line = self.file.readline().strip()
# terminate at the end
if line == '':break
# skip comments
elif line[:1] == '#': continue
else:
content = line.split(self.split)
self._processLine(coordinate, content, stdevThread)
# Process information in reading line
def _processLine(self, coordinate, content, stdevThread):
# Check file format
if len(content) < 7:
if content == ['\n']:
raise tool.Error('Bad GRN format: empty line')
else:
raise tool.Error('Fatal GRN format: not enough info')
# Process current record
else:
id = content[0]
if id not in self.entryCoords:
self.entryCoords[id] = coordinate
else:
                raise tool.Error('Duplicate GRP id in GRN: ' + self.filePath)
# Pattern info in each line
def _prepareInfo(self, content):
return {
'id':content[0],
'regulatory_source':content[1],
'sourceGroup':content[2],
'regulatory_target':content[3],
'targetGroup':content[4],
'correlation':float(content[5]),
'attribute':content[6],
}
# Output all Gene Expression data in dict format
def makeGeneExpDict(self, stdevKpRatio):
records = []
for id in self.entryCoords:
_, data = self.get(id)
if stdevKpRatio is not None:
stdev = sta.stdev(data)
records.append([id, stdev, data])
else:
records.append([id, data])
# Filter records based on keep ratio
if stdevKpRatio is not None:
records.sort(key = lambda x:x[1], reverse = True)
records = records[:int(len(records) * stdevKpRatio)]
return {record[0]: record[-1] for record in records}
# Get info of selected id
def get(self, id):
self.file.seek(self.entryCoords[id])
line = self.file.readline().strip()
content = line.split(self.split)
return self._prepareInfo(content)
# For iteration
def __next__(self):
entryKeys = [*self.entryCoords]
if self.iteration == len(entryKeys):
self.iteration = 0
raise StopIteration
else:
id = entryKeys[self.iteration]
self.iteration += 1
return self.get(self, id)
""" Old GEM Reader """
# # Pattern info in each line
# def _prepareInfo(self, content):
# id = content[0].strip().upper()
# data = [float(x) for x in content[1:]]
# return id, data
#
# # Process information in reading line
# def _processLine(self, coordinate, content, stdevThread):
# id, data = self._prepareInfo(content)
# stdev = sta.stdev(data)
# # Filter records based on stdev thredshould
# if stdevThread is None or stdev >= stdevThread:
# if id not in self.entryCoords:
# self.entryCoords[id] = coordinate
# # Keep one with high stdev
# else:
# message = id + 'is duplicated'
# warn(message)
# if stdev > sta.stdev(self.get(id)[1]):
# self.entryCoords[id] = coordinate
# # get back to original position
# self.file.seek(coordinate)
#         line = self.file.readline().strip()
import os
import pandas as pd
import ageas.tool as tool
import ageas.tool.json as json
class Processor:
"""
Process summarized bioGRID file
"""
def __init__(self, specie_path, path = 'bioGRiD_stratified.js.gz'):
self.path = specie_path + path
data = json.decode(self.path)
self.alias = data['alias']
self.data = data['interactions']
class Reader(tool.Reader_Template):
def __init__(self,
filepath: str,
organism_a_id: str = '10090', # human is 9606, mouse is 10090
organism_b_id = None
):
if organism_b_id is None:
organism_b_id = organism_a_id
self.load(filepath)
self.dict = self._process(org_a = organism_a_id, org_b = organism_b_id)
def _process(self, org_a = None, org_b = None):
result = {
'alias':{},
'interactions':{}
}
# skip headers
while(True):
line = self.file.readline()
if line[:12] == 'INTERACTOR_A': break
# now we read in records
while(True):
line = self.file.readline()
if line == '': break
content = line.split('\t')
assert len(content) == 11
if content[-2].strip() == org_a and content[-1].strip() == org_b:
geneA = content[2].upper()
geneB = content[3].upper()
geneA_alias = content[4].upper()
geneB_alias = content[5].upper()
self.__update_interactions(geneA, geneB, result['interactions'])
self.__update_interactions(geneB, geneA, result['interactions'])
self.__update_alias(geneA, geneA_alias, result['alias'])
self.__update_alias(geneB, geneB_alias, result['alias'])
return result
def __update_interactions(self, key, target, dict):
if key in dict and target not in dict[key]:
dict[key].append(target)
elif key not in dict:
dict[key] = [target]
def __update_alias(self, gene, alias, dict):
if alias == '-': return
all_names = alias.split('|')
for name in all_names:
if name not in dict:
dict[name] = [gene]
else:
if gene not in dict[name]:
dict[name].append(gene)
def save(self, outpath):
json.encode(self.dict, outpath)
""" For example """
# if __name__ == '__main__':
# a = Reader(filepath = 'BIOGRID-ALL-4.4.203.tab.txt')
# a.save('bioGRiD_stratified.js.gz')
import os
import re
import pandas as pd
import ageas.tool as tool
from warnings import warn
class Reader(object):
"""
Class to read in scRNA-seq or bulk RNA-seq based Gene Expression Matrices
    Only supports .csv and .txt for now
"""
def __init__(self, path:str = None, **kwargs):
super(Reader, self).__init__()
        # Decide which separation mark to use
        if re.search(r'csv', path): self.sep = ','
        elif re.search(r'txt', path): self.sep = '\t'
# determine compression method
if re.search(r'.gz', path): self.compression = 'gzip'
elif re.search(r'.zip', path): self.compression = 'zip'
else: self.compression = 'infer'
try:
self.data = pd.read_csv(
path,
sep = self.sep,
compression = self.compression,
**kwargs
)
except Exception as GEM_Reader_Error:
raise tool.Error('Unsupported File Type: ', path)
    # filter data frame based on standard deviations
def STD_Filter(self, std_value_thread = None, std_ratio_thread = None):
self.data = tool.STD_Filter(
df = self.data,
std_value_thread = std_value_thread,
std_ratio_thread = std_ratio_thread
)
class Folder(object):
"""
    Manipulations on Gene Expression Matrices in given folder
"""
def __init__(self,
path:str = None,
file_type = 'csv', # type of file considering GEM
compression_method = 'infer', # compression method of files
header_row = 0, # header row for all GEM
index_col = 0, # index column for all GEM
):
self.path = path
self.header_row = header_row
self.index_col = index_col
# file type check
if file_type == 'csv': self.sep = ','
elif file_type == 'txt': self.sep = '\t'
else: raise tool.Error('Folder: Unknown file type')
self.file_type = file_type
self.compression_method = compression_method
# combine all GEMs to one unified GEM
# all GEMs should have exactly same index (Gene list)
def combine(self,
method = 'inner',
keep_repeated_samples = False,
std_value_thread = None,
std_ratio_thread = None,
outpath = None
):
# Grab all necessary samples first
result = None
for filename in os.listdir(self.path):
# Skip files without targeting appendix
if not re.search(self.file_type, filename): continue
filepath = self.path + '/' + filename
gem = pd.read_csv(
filepath,
sep = self.sep,
header = self.header_row,
index_col = self.index_col,
compression = self.compression_method
)
# Initialize output df if still empty
if result is None:
result = gem
continue
if keep_repeated_samples:
result = result.join(gem, how = method)
else:
unique_samples = gem.columns.difference(result.columns)
# if nothing new, move forward
if len(unique_samples) == 0: continue
result = pd.merge(
result,
gem[unique_samples],
left_index = True,
right_index = True,
how = method
)
# Just in case
result = result[~result.index.duplicated(keep='first')]
del gem
# filter by standard deviations if needed
if std_value_thread is not None or std_ratio_thread is not None:
result = tool.STD_Filter(result, std_value_thread, std_ratio_thread)
# return or save matrix
if outpath is None: return result
else:
result.to_csv(
outpath,
sep = self.sep,
compression = self.compression_method
            )
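
# A minimal usage sketch (the folder path is a placeholder and
# _combine_gem_folder_demo is illustrative only): merge every GEM csv found in
# one folder into a single matrix, keeping only genes whose expression
# standard deviation reaches the given value threshold.
def _combine_gem_folder_demo():
    folder = Folder(path = 'data/class1_gems', file_type = 'csv')
    # returns a combined DataFrame; pass outpath = '...' instead to write a file
    return folder.combine(std_value_thread = 1.0)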
import re
import gzip
from scipy.stats import pearsonr
# Get Pearson correlation value; treated as 0 if the p-value fails the threshold
# Originally the Pearson p-value threshold was 1, which would be inappropriate
def Get_Pearson(source, target, p_thread = 0.05):
pearson = pearsonr(source, target)
if pearson[1] >= p_thread: return 0
else: return pearson[0]
# Standard Deviation (STD) Filter for data frame(df)
def STD_Filter(df, std_value_thread = None, std_ratio_thread = None):
data = df.transpose()
sd_list = data[data.columns].std().sort_values(ascending=False)
    gene_list = list(sd_list.index)
    # filter by stdev threshold value
    if std_value_thread is not None:
        for i in range(len(sd_list)):
            if sd_list[i] < std_value_thread: break
        sd_list = sd_list[:i]
        gene_list = list(sd_list.index)
    # filter by ratio threshold
if std_ratio_thread is not None:
gene_list = gene_list[:int(len(gene_list) * std_ratio_thread)]
# stratify data
data = data[gene_list]
data.columns = data.columns.str.upper()
return data.transpose()
# standardize feature scores applying Z score
def Z_Score_Standardize(df, col):
df[col] = (df[col] - df[col].mean()) / df[col].std(ddof=0)
return df
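
# A quick sketch of STD_Filter on a toy gene x sample matrix (gene ids are made
# up and _std_filter_demo is illustrative only): genes whose expression standard
# deviation falls below std_value_thread are dropped, and gene ids come back
# uppercased.
def _std_filter_demo():
    import pandas as pd
    df = pd.DataFrame(
        [[1.0, 1.0, 1.0], [0.0, 5.0, 10.0]],
        index = ['FLAT_GENE', 'VAR_GENE'],
        columns = ['s1', 's2', 's3']
    )
    # keeps only VAR_GENE, whose standard deviation (5.0) passes the threshold
    return STD_Filter(df, std_value_thread = 1.0)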
class Error(Exception):
"""
File processing related error handling
"""
pass
class Reader_Template:
"""
Template for file reading class
"""
    def __init__(self, filename = None):
super(Reader_Template, self).__init__()
self.filePath = filename
self.file = None
# Load in file
def load(self, filename):
self.filePath = filename
# Open as .gz file
if re.search(r'\.gz$', self.filePath):
self.file = gzip.open(self.filePath, 'rt', encoding='utf-8')
# Open directly
else:
self.file = open(self.filePath, 'r')
# Close file reading
def close(self):
self.file.close()
# For iteration
def __iter__(self):
return self
# Need to be override based on need
def __next__(self):
        return self
{
"Transformer": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"has_mask": [
true,
false
],
"emsize": [
512,
1024
],
"nhead":[
8,
4
],
"nhid":[
200
],
"nlayers":[
2,
4
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
},
"RFC": {
"n_estimators": [
100,
50
],
"criterion": [
"gini",
"entropy"
],
"max_features":[
0.01,
"auto",
"log2",
null
]
},
"GNB": {
"var_smoothing": [
1e-9,
1e-6,
1e-3,
1
]
},
"Logit": {
"penalty": [
"l2",
"l1",
"elasticnet",
"none"
],
"solver": [
"newton-cg",
"lbfgs",
"liblinear"
],
"C": [
1.0
]
},
"SVM": {
"kernel": [
"rbf",
"poly",
"linear"
],
"gamma": [
"scale",
"auto"
],
"C": [
1.0
],
"degree": [
3
],
"cache_size": [
500
],
"probability": [
true
]
},
"GBM": {
"booster": [
"gbtree"
],
"objective": [
"multi:softmax",
"binary:logistic"
],
"eval_metric": [
"mlogloss"
],
"eta": [
0.1,
0.3
],
"gamma": [
0,
0.1
],
"max_depth": [
6
],
"min_child_weight": [
1,
2
],
"alpha": [
0
]
},
"CNN_Hybrid":{
"epoch": [
2
],
"batch_size": [
5,
10
],
"matrix_size": [
[
292,
292
]
],
"conv_kernel_num": [
32,
64
],
"maxpool_kernel_size": [
2,
3
],
"densed_size": [
64,
128
],
"num_layers": [
1,
2
],
"learning_rate": [
0.1
]
},
"CNN_1D": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"conv_kernel_size": [
32
],
"conv_kernel_num": [
32,
64
],
"maxpool_kernel_size": [
2,
3
],
"densed_size": [
64,
128
],
"num_layers": [
1,
2
],
"learning_rate": [
0.1
]
},
"RNN": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"hidden_size":[
128,
256
],
"num_layers":[
2,
3
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
},
"LSTM": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"hidden_size":[
128,
256
],
"num_layers":[
2,
3
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
},
"GRU": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"hidden_size":[
128,
256
],
"num_layers":[
2,
3
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
}
}
{
"Transformer": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"has_mask": [
true,
false
],
"emsize": [
512,
1024
],
"nhead":[
8,
4
],
"nhid":[
200
],
"nlayers":[
2,
4
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
},
"RFC": {
"n_estimators": [
100
],
"criterion": [
"gini",
"entropy"
],
"max_features":[
"auto"
]
},
"SVM": {
"kernel": [
"linear"
],
"gamma": [
"scale",
"auto"
],
"C": [
1.0
],
"degree": [
3
],
"cache_size": [
500
],
"probability": [
true
]
},
"GBM": {
"booster": [
"gbtree"
],
"objective": [
"multi:softmax",
"binary:logistic"
],
"eval_metric": [
"mlogloss"
],
"eta": [
0.1,
0.3
],
"gamma": [
0,
0.1
],
"max_depth": [
6
],
"min_child_weight": [
1,
2
],
"alpha": [
0
]
},
"CNN_Hybrid":{
"epoch": [
2
],
"batch_size": [
5,
10
],
"matrix_size": [
[
292,
292
]
],
"conv_kernel_num": [
32,
64
],
"maxpool_kernel_size": [
2,
3
],
"densed_size": [
64,
128
],
"num_layers": [
1,
2
],
"learning_rate": [
0.1
]
},
"CNN_1D": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"conv_kernel_size": [
32
],
"conv_kernel_num": [
32,
64
],
"maxpool_kernel_size": [
2,
3
],
"densed_size": [
64,
128
],
"num_layers": [
1,
2
],
"learning_rate": [
0.1
]
},
"RNN": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"hidden_size":[
128
],
"num_layers":[
2,
3
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
},
"LSTM": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"hidden_size":[
128
],
"num_layers":[
2,
3
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
},
"GRU": {
"epoch": [
2
],
"batch_size": [
5,
10
],
"hidden_size":[
128
],
"num_layers":[
2,
3
],
"learning_rate":[
0.01
],
"dropout":[
0.2
]
}
}

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/data/config/list_config.js | list_config.js
{
"Transformer": {
"pytorch_transformer_0": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_1": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_2": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_3": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_4": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_5": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_6": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_7": {
"config": {
"has_mask": false,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_8": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_9": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_10": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_11": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_12": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_13": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_14": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_15": {
"config": {
"has_mask": false,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_16": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_17": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_18": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_19": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_20": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_21": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_22": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_23": {
"config": {
"has_mask": true,
"emsize": 1024,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_24": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_25": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_26": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_27": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 4,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_28": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_29": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 4,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_transformer_30": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_transformer_31": {
"config": {
"has_mask": true,
"emsize": 512,
"nhead": 8,
"nhid": 200,
"nlayers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
}
},
"CNN_1D": {
"pytorch_cnn_1d_0": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_1": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_2": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_3": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_4": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_5": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_6": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_7": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_8": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_9": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_10": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_11": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_12": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_13": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_14": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_15": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_16": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_17": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_18": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_19": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_20": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_21": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_22": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_23": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_24": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_25": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_26": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_27": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_28": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_29": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_1d_30": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_1d_31": {
"config": {
"conv_kernel_size": 32,
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
}
},
"CNN_Hybrid": {
"pytorch_cnn_hybrid_0": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_1": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_2": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_3": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_4": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_5": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_6": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_7": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_8": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_9": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_10": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_11": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_12": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_13": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_14": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_15": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 64,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_16": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_17": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_18": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_19": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_20": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_21": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_22": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_23": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 3,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_24": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_25": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_26": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_27": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 128,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_28": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_29": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 2,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
},
"pytorch_cnn_hybrid_30": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 10
},
"pytorch_cnn_hybrid_31": {
"config": {
"matrix_size": [
292,
292
],
"conv_kernel_num": 32,
"maxpool_kernel_size": 2,
"densed_size": 64,
"num_layers": 1,
"learning_rate": 0.1
},
"epoch": 2,
"batch_size": 5
}
},
"RNN": {
"pytorch_rnn_0": {
"config": {
"hidden_size": 128,
"num_layers": 3,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_rnn_1": {
"config": {
"hidden_size": 128,
"num_layers": 3,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_rnn_2": {
"config": {
"hidden_size": 128,
"num_layers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_rnn_3": {
"config": {
"hidden_size": 128,
"num_layers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
}
},
"GRU": {
"pytorch_gru_0": {
"config": {
"hidden_size": 128,
"num_layers": 3,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_gru_1": {
"config": {
"hidden_size": 128,
"num_layers": 3,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_gru_2": {
"config": {
"hidden_size": 128,
"num_layers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_gru_3": {
"config": {
"hidden_size": 128,
"num_layers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
}
},
"LSTM": {
"pytorch_lstm_0": {
"config": {
"hidden_size": 128,
"num_layers": 3,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_lstm_1": {
"config": {
"hidden_size": 128,
"num_layers": 3,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
},
"pytorch_lstm_2": {
"config": {
"hidden_size": 128,
"num_layers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 10
},
"pytorch_lstm_3": {
"config": {
"hidden_size": 128,
"num_layers": 2,
"learning_rate": 0.01,
"dropout": 0.2
},
"epoch": 2,
"batch_size": 5
}
},
"SVM": {
"sklearn_svc_0": {
"config": {
"kernel": "linear",
"gamma": "auto",
"C": 1.0,
"degree": 0,
"cache_size": 500,
"probability": true
}
},
"sklearn_svc_1": {
"config": {
"kernel": "linear",
"gamma": "scale",
"C": 1.0,
"degree": 0,
"cache_size": 500,
"probability": true
}
}
},
"RFC": {
"sklearn_rfc_0": {
"config": {
"n_estimators": 100,
"criterion": "entropy",
"max_features": "auto"
}
},
"sklearn_rfc_1": {
"config": {
"n_estimators": 100,
"criterion": "gini",
"max_features": "auto"
}
}
},
"GBM": {
"xgboost_gbm_0": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0
}
},
"xgboost_gbm_1": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0
}
},
"xgboost_gbm_2": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0
}
},
"xgboost_gbm_3": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0
}
},
"xgboost_gbm_4": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0
}
},
"xgboost_gbm_5": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0
}
},
"xgboost_gbm_6": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0
}
},
"xgboost_gbm_7": {
"config": {
"booster": "gbtree",
"objective": "binary:logistic",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0
}
},
"xgboost_gbm_8": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_9": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_10": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_11": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.3,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_12": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_13": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0.1,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_14": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 2,
"alpha": 0,
"num_class": 2
}
},
"xgboost_gbm_15": {
"config": {
"booster": "gbtree",
"objective": "multi:softmax",
"eval_metric": "mlogloss",
"eta": 0.1,
"gamma": 0,
"max_depth": 6,
"min_child_weight": 1,
"alpha": 0,
"num_class": 2
}
}
}
}

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/data/config/sample_config.js | sample_config.js
import torch
import torch.nn as nn
import torch.optim as optim
import ageas.classifier as classifier
class LSTM(nn.Module):
"""
Recurrent neural network (many-to-one)
"""
def __init__(self,
id,
input_size,
num_layers,
hidden_size,
dropout,
learning_rate,
n_class = 2
):
super(LSTM, self).__init__()
self.id = id
self.model_type = 'LSTM'
self.num_layers = num_layers
self.hidden_size = hidden_size
self.dropout = nn.Dropout(p = dropout)
self.lstm = nn.LSTM(
input_size,
self.hidden_size,
self.num_layers,
batch_first = True
)
self.fc = nn.Linear(self.hidden_size, n_class)
self.optimizer = torch.optim.Adam(self.parameters(), lr = learning_rate)
self.loss_func = nn.CrossEntropyLoss()
def forward(self, input):
input = self.dropout(input)
        # Set initial hidden and cell states
        # (created on the input's device to avoid a CPU/GPU mismatch)
        h0 = torch.zeros(
            self.num_layers, input.size(0), self.hidden_size, device = input.device
        )
        c0 = torch.zeros(
            self.num_layers, input.size(0), self.hidden_size, device = input.device
        )
# Forward propagate LSTM
out, _ = self.lstm(input, (h0, c0))
# out: tensor of shape (batch_size, seq_length, hidden_size)
# Decode the hidden state of the last time step
out = self.fc(out[:, -1, :])
return out
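# A hedged usage sketch (kept commented, mirroring the "For testing" block in
# transformer.py); the tensor shapes and hyperparameters below are illustrative
# assumptions rather than values shipped with this package.
# if __name__ == '__main__':
#     param = {
#         'num_layers': 2,
#         'hidden_size': 128,
#         'dropout': 0.2,
#         'learning_rate': 0.01
#     }
#     data = torch.rand((3, 1, 100))    # (batch, seq_len, input_size)
#     model = LSTM(id = 'a', input_size = 100, **param)
#     model.train()
#     model.optimizer.zero_grad()
#     out = model(data)                 # (batch, n_class) raw scores
#     loss = model.loss_func(out, torch.randint(0, 2, (3,)))
#     loss.backward()
#     model.optimizer.step()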
class Make(classifier.Make_Template):
"""
Analysis the performances of LSTM based approaches
with different hyperparameters
Find the top settings to build LSTM
"""
# Perform classifier training process for given times
def train(self, dataSets, test_split_set):
testData = classifier.reshape_tensor(dataSets.dataTest)
testLabel = dataSets.labelTest
num_features = len(dataSets.dataTest[0])
for id in self.configs:
model = LSTM(id, num_features, **self.configs[id]['config'])
epoch = self.configs[id]['epoch']
batch_size = self.configs[id]['batch_size']
self._train_torch(epoch, batch_size, model, dataSets)
accuracy = self._evaluate_torch(
model,
testData,
testLabel,
test_split_set
)
            self.models.append([model, accuracy])

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/lstm.py | lstm.py
import math
import torch
import torch.nn as nn
import torch.nn.functional as func
from torch.nn import TransformerEncoder, TransformerEncoderLayer
import ageas.classifier as classifier
class Positional_Encoding(nn.Module):
"""
Inject some information about the relative or absolute position
of the tokens in the sequence.
The positional encodings have the same dimension as the embeddings,
so that the two can be summed.
Here, we use sine and cosine functions of different frequencies.
.. math:
\text{PosEncoder}(pos, 2i) = sin(pos/10000^(2i/d_model))
\text{PosEncoder}(pos, 2i+1) = cos(pos/10000^(2i/d_model))
\text{where pos is the word position and i is the embed idx)
Args:
d_model: the embed dim (required).
dropout: the dropout value (default=0.1).
Examples:
>>> pos_encoder = Positional_Encoding(d_model)
"""
def __init__(self, d_model, dropout = 0.1):
super(Positional_Encoding, self).__init__()
self.dropout = nn.Dropout(p=dropout)
pe = torch.zeros(d_model, d_model)
position = torch.arange(0, d_model, dtype=torch.float).unsqueeze(1)
div_term = torch.exp(torch.arange(0, d_model, 2).float() *
(-math.log(10000.0) / d_model))
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
pe = pe.unsqueeze(0).transpose(0, 1)
self.register_buffer('pe', pe)
def forward(self, x):
"""
Args:
x: the sequence fed to the positional encoder model (required).
Shape:
x: [sequence length, batch size, embed dim]
output: [sequence length, batch size, embed dim]
Examples:
>>> output = pos_encoder(x)
"""
x = x + self.pe[:x.size(0), :]
return self.dropout(x)
class Transformer(nn.Module):
"""
Container module with an encoder, a transformer module, and a decoder.
"""
def __init__(self,
id, # model id
num_features, # the number of expected features
has_mask = True, # whether using mask or not
emsize = 512, # size after encoder
nhead = 8, # number of heads in the multiheadattention models
nhid = 200, # number of hidden units per layer
nlayers = 2, # number of layers
dropout = 0.5, # dropout ratio
learning_rate = 0.1,
n_class = 2, # number of class for classification
):
super(Transformer, self).__init__()
self.id = id
self.has_mask = has_mask
self.model_type = 'Transformer'
self.num_features = num_features
self.emsize = emsize
self.mask = None
self.encoder = nn.Linear(num_features, emsize)
self.pos_encoder = Positional_Encoding(emsize, dropout)
encoder_layers = TransformerEncoderLayer(emsize, nhead, nhid, dropout)
self.transformer_encoder = TransformerEncoder(encoder_layers, nlayers)
self.decoder = nn.Linear(emsize, n_class)
#self.optimizer = torch.optim.AdamW(self.parameters(), lr=learning_rate)
self.optimizer = None
self.loss_func = nn.CrossEntropyLoss()
# init_weights
initrange = 0.1
nn.init.zeros_(self.decoder.bias)
nn.init.uniform_(self.decoder.weight, -initrange, initrange)
def _make_square_subsequent_mask(self, size):
mask = (torch.triu(torch.ones(size, size)) == 1).transpose(0, 1)
return mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(
mask == 1,
float(0.0)
)
def forward(self, input):
if self.has_mask:
if self.mask is None or self.mask.size(0) != len(input):
self.mask = self._make_square_subsequent_mask(len(input))
else:
self.mask = None
input = self.encoder(input)
input = self.pos_encoder(input)
output = self.transformer_encoder(input, self.mask)
output = torch.flatten(output, start_dim = 1)
output = func.softmax(self.decoder(output), dim = -1)
return output
class Make(classifier.Make_Template):
"""
    Analyze the performances of Transformer-based approaches
    with different hyperparameters
    Find the top settings to build Transformer
"""
# Perform classifier training process for given times
def train(self, dataSets, test_split_set):
testData = classifier.reshape_tensor(dataSets.dataTest)
testLabel = dataSets.labelTest
num_features = len(dataSets.dataTest[0])
for id in self.configs:
model = Transformer(id, num_features, **self.configs[id]['config'])
epoch = self.configs[id]['epoch']
batch_size = self.configs[id]['batch_size']
self._train_torch(epoch, batch_size, model, dataSets)
# local test
accuracy = self._evaluate_torch(
model,
testData,
testLabel,
test_split_set
)
self.models.append([model, accuracy])
""" For testing """
# if __name__ == '__main__':
# param = {
# 'has_mask': True,
# 'emsize': 512,
# 'nhead': 8,
# 'nhid': 200,
# 'nlayers': 2,
# 'dropout': 0.5,
# 'learning_rate': 0.1
# }
# data = torch.rand((3,1,22090))
# model = Transformer(id = 'a', num_features = 22090, **param)
# model.train()
# if model.optimizer is not None: model.optimizer.zero_grad()
# out = model(data)
# print(out)
# loss = model.loss_func(out, torch.randint(0,1,(3,)))
# loss.backward()
# if model.optimizer is not None: model.optimizer.step()

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/transformer.py | transformer.py
import os
import math
import torch
import itertools
import torch.nn as nn
from warnings import warn
import torch.optim as optim
import torch.nn.functional as func
from torch.utils.data import DataLoader
import ageas.classifier as classifier
class Limited(nn.Module):
"""
Defining a CNN model treating input as 2D data
with given hyperparameters
then using 2 1D convolution kernels to generate layers
Layer set number limited to max == 3
"""
def __init__(self, id, param, n_class = 2):
super().__init__()
# Initialization
self.id = id
self.model_type = 'CNN_Hybrid_Limited'
self.matrixSize = param['matrix_size']
self.num_layers = param['num_layers']
self.loss_func = nn.CrossEntropyLoss()
# Layer set 1
self.poolVer = nn.MaxPool2d((1, param['maxpool_kernel_size']))
self.convVer = nn.Conv2d(
1,
param['conv_kernel_num'],
(1, self.matrixSize[1])
)
self.poolHor = nn.MaxPool2d((param['maxpool_kernel_size'], 1))
self.convHor = nn.Conv2d(
1,
param['conv_kernel_num'],
(self.matrixSize[0], 1)
)
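        # Layer set 2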
self.poolVer1 = nn.MaxPool2d((1, param['maxpool_kernel_size']))
self.convVer1 = nn.Conv2d(
param['conv_kernel_num'],
param['conv_kernel_num'],
(int(
self.matrixSize[1]/pow(param['maxpool_kernel_size'],self.num_layers)
), 1)
)
self.poolHor1 = nn.MaxPool2d((param['maxpool_kernel_size'], 1))
self.convHor1 = nn.Conv2d(
param['conv_kernel_num'],
param['conv_kernel_num'],
(1, int(
self.matrixSize[0]/pow(param['maxpool_kernel_size'],self.num_layers)
))
)
# Layer set 3
self.poolVer2 = nn.MaxPool2d((1, param['maxpool_kernel_size']))
self.convVer2 = nn.Conv2d(
param['conv_kernel_num'],
param['conv_kernel_num'],
(int(
self.matrixSize[1]/pow(param['maxpool_kernel_size'],self.num_layers)
), 1)
)
self.poolHor2 = nn.MaxPool2d((param['maxpool_kernel_size'], 1))
self.convHor2 = nn.Conv2d(
param['conv_kernel_num'],
param['conv_kernel_num'],
(1, int(
self.matrixSize[0]/pow(param['maxpool_kernel_size'],self.num_layers)
))
)
### Same problem as 1D model ###
# flattenLength = int(featureNum / pow(maxpool_kernel_size, num_layers))
# self.dense = nn.Linear(flattenLength, densed_size)
self.dense = nn.LazyLinear(param['densed_size'])
self.decision = nn.Linear(param['densed_size'], n_class)
self.optimizer = optim.SGD(self.parameters(), param['learning_rate'])
# Overwrite the forward function in nn.Module
def forward(self, input):
input = self.reshape(input)
temp0 = self.poolVer(func.relu(self.convHor(input)))
temp1 = self.poolHor(func.relu(self.convVer(input)))
if self.num_layers > 1:
temp0 = self.poolVer1(func.relu(self.convHor1(temp0)))
temp1 = self.poolHor1(func.relu(self.convVer1(temp1)))
if self.num_layers > 2:
temp0 = self.poolVer2(func.relu(self.convHor2(temp0)))
temp1 = self.poolHor2(func.relu(self.convVer2(temp1)))
if self.num_layers > 3:
raise classifier.Error('CNN Model with more than 3 layer sets')
temp0 = torch.flatten(temp0, start_dim = 1)
temp1 = torch.flatten(temp1, start_dim = 1)
input = torch.cat((temp0, temp1), dim = 1)
input = func.relu(self.dense(input))
input = func.softmax(self.decision(input),dim = 1)
return input
# transform input(1D) into a 2D matrix
def reshape(self, input):
return torch.reshape(input, (input.shape[0], input.shape[1],
self.matrixSize[0], self.matrixSize[1]))
class Unlimited(nn.Module):
"""
Defining a CNN model treating input as 2D data
with given hyperparameters
then using 2 1D convolution kernels to generate layers
"""
    def __init__(self, id, param, n_class = 2):
super().__init__()
# Initialization
self.id = id
self.model_type = 'CNN_Hybrid_Unlimited'
self.mat_size = param['matrix_size']
self.num_layers = param['num_layers']
self.loss_func = nn.CrossEntropyLoss()
self.pool0 = nn.MaxPool2d((1, param['maxpool_kernel_size']))
self.pool1 = nn.MaxPool2d((param['maxpool_kernel_size'], 1))
dividen = pow(param['maxpool_kernel_size'], self.num_layers)
self.conv0 = nn.Conv2d(
1,
param['conv_kernel_num'],
(self.mat_size[0],1)
)
self.conv0_recur = nn.Conv2d(
param['conv_kernel_num'],
param['conv_kernel_num'],
(1, max(1, int(self.mat_size[0] / dividen)))
)
self.conv1 = nn.Conv2d(
1,
param['conv_kernel_num'],
(1,self.mat_size[1])
)
self.conv1_recur = nn.Conv2d(
param['conv_kernel_num'],
param['conv_kernel_num'],
(max(1, int(self.mat_size[1] / dividen)), 1)
)
### Same problem as 1D model ###
# flattenLength = int(featureNum / pow(maxpool_kernel_size, num_layers))
# self.dense = nn.Linear(flattenLength, densed_size)
self.dense = nn.LazyLinear(param['densed_size'])
self.decision = nn.Linear(param['densed_size'], n_class)
self.optimizer = optim.SGD(self.parameters(), param['learning_rate'])
# Overwrite the forward function in nn.Module
def forward(self, input):
input = self.reshape(input)
temp0 = self.pool0(func.relu(self.conv0(input)))
for i in range(self.num_layers - 1):
temp0 = self.pool0(func.relu(self.conv0_recur(temp0)))
temp0 = torch.flatten(temp0, start_dim = 1)
temp1 = self.pool1(func.relu(self.conv1(input)))
for i in range(self.num_layers - 1):
temp1 = self.pool1(func.relu(self.conv1_recur(temp1)))
temp1 = torch.flatten(temp1, start_dim = 1)
input = torch.cat((temp0, temp1), dim = 1)
input = func.relu(self.dense(input))
input = func.softmax(self.decision(input),dim = 1)
return input
# transform input(1D) into a 2D matrix
def reshape(self, input):
return torch.reshape(input, (input.shape[0], input.shape[1],
self.mat_size[0], self.mat_size[1]))
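# A hedged construction sketch (comments only); the parameter values mirror
# the 'CNN_Hybrid' entries in the sample config and are assumptions for
# illustration, not defaults of this module:
#   param = {
#       'matrix_size': [292, 292],
#       'conv_kernel_num': 32,
#       'maxpool_kernel_size': 2,
#       'densed_size': 64,
#       'num_layers': 2,
#       'learning_rate': 0.1
#   }
#   model = Limited(id = 'demo', param = param)   # num_layers < 3 -> Limited
#   # input: a (batch, 1, 292 * 292) tensor, reshaped to 2D internally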
class Make(classifier.Make_Template):
"""
    Analyze the performances of CNN-based approaches
with different hyperparameters
Find the top settings to build CNN
"""
def __init__(self, config, cpu_mode, grp_amount):
self.configs = config
self.__check_input_matrix_size(grp_amount)
self.models = []
self.cpu_mode = cpu_mode
# Perform classifier training process for given times
def train(self, dataSets, test_split_set):
testData = classifier.reshape_tensor(dataSets.dataTest)
testLabel = dataSets.labelTest
num_features = len(dataSets.dataTest[0])
for id in self.configs:
if self.configs[id]['config']['num_layers'] < 3:
model = Limited(id, self.configs[id]['config'])
else:
model = Unlimited(id, self.configs[id]['config'])
epoch = self.configs[id]['epoch']
batch_size = self.configs[id]['batch_size']
self._train_torch(epoch, batch_size, model, dataSets)
accuracy = self._evaluate_torch(
model,
testData,
testLabel,
test_split_set
)
self.models.append([model, accuracy])
# Check whether matrix sizes are reasonable or not
def __check_input_matrix_size(self, grp_amount):
matrix_dim = int(math.sqrt(grp_amount))
        # square_size is the fallback square-shaped matrix dimension
square_size = [matrix_dim, matrix_dim]
        for id in self.configs:
            mat_size = self.configs[id]['config']['matrix_size']
            if mat_size is None or len(mat_size) != 2:
                warn('No valid matrix size in config')
                warn('Using 1:1 matrix size: ' + str(square_size))
                self.configs[id]['config']['matrix_size'] = square_size
            elif mat_size[0] * mat_size[1] != grp_amount:
                warn('Ignored illegal matrix_size config: ' + str(mat_size))
                self.configs[id]['config']['matrix_size'] = square_size

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/cnn_hybrid.py | cnn_hybrid.py
import torch
import torch.nn as nn
import torch.optim as optim
import ageas.classifier as classifier
class RNN(nn.Module):
def __init__(self,
id,
input_size,
num_layers,
hidden_size,
dropout,
learning_rate,
n_class = 2
):
super(RNN, self).__init__()
self.id = id
self.model_type = 'RNN'
self.num_layers = num_layers
self.hidden_size = hidden_size
self.dropout = nn.Dropout(p = dropout)
self.rnn = nn.RNN(
input_size,
self.hidden_size,
self.num_layers,
batch_first = True
)
self.fc = nn.Linear(self.hidden_size, n_class)
self.optimizer = torch.optim.Adam(self.parameters(), lr = learning_rate)
self.loss_func = nn.CrossEntropyLoss()
def forward(self, input):
input = self.dropout(input)
        # Set initial hidden states
        # (created on the input's device to avoid a CPU/GPU mismatch)
        h0 = torch.zeros(
            self.num_layers, input.size(0), self.hidden_size, device = input.device
        )
out, _ = self.rnn(input, h0)
# out: tensor of shape (batch_size, seq_length, hidden_size)
# Decode the hidden state of the last time step
out = self.fc(out[:, -1, :])
return out
class Make(classifier.Make_Template):
"""
    Analyze the performances of RNN-based approaches
with different hyperparameters
Find the top settings to build
"""
# Perform classifier training process for given times
def train(self, dataSets, test_split_set):
testData = classifier.reshape_tensor(dataSets.dataTest)
testLabel = dataSets.labelTest
num_features = len(dataSets.dataTest[0])
for id in self.configs:
model = RNN(id, num_features, **self.configs[id]['config'])
epoch = self.configs[id]['epoch']
batch_size = self.configs[id]['batch_size']
self._train_torch(epoch, batch_size, model, dataSets)
# local test
accuracy = self._evaluate_torch(
model,
testData,
testLabel,
test_split_set
)
            self.models.append([model, accuracy])

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/rnn.py | rnn.py
import os
import torch
import itertools
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as func
import ageas.classifier as classifier
class Limited(nn.Module):
"""
Defining a CNN model treating input as 1D data
with given hyperparameters
Layer set number limited to max == 3
"""
def __init__(self, id, param, n_class = 2):
# Initialization
super().__init__()
self.id = id
self.model_type = 'CNN_1D_Limited'
self.num_layers = param['num_layers']
self.loss_func = nn.CrossEntropyLoss()
# Layer set 1
self.pool = nn.MaxPool1d(param['maxpool_kernel_size'])
self.conv = nn.Conv1d(
1,
param['conv_kernel_num'],
param['conv_kernel_size']
)
# Layer set 2
self.pool1 = nn.MaxPool1d(param['maxpool_kernel_size'])
self.conv1 = nn.Conv1d(
param['conv_kernel_num'],
param['conv_kernel_num'],
param['conv_kernel_size']
)
# Layer set 3
self.pool2 = nn.MaxPool1d(param['maxpool_kernel_size'])
self.conv2 = nn.Conv1d(
param['conv_kernel_num'],
param['conv_kernel_num'],
param['conv_kernel_size']
)
### Trying to avoid Lazy module since it's under development ###
### But so far it's working just fine, so still using lazy module ###
# flattenLength = int(featureNum / pow(maxpool_kernel_size, num_layers))
# self.dense = nn.Linear(flattenLength, densed_size)
### -------------------------------------------------------- ###
self.dense = nn.LazyLinear(param['densed_size'])
self.decision = nn.Linear(param['densed_size'], n_class)
self.optimizer = optim.SGD(self.parameters(), param['learning_rate'])
# Overwrite the forward function in nn.Module
def forward(self, input):
input = self.pool(func.relu(self.conv(input)))
if self.num_layers > 1:
input = self.pool1(func.relu(self.conv1(input)))
if self.num_layers > 2:
input = self.pool2(func.relu(self.conv2(input)))
if self.num_layers > 3:
raise classifier.Error('CNN Model with more than 3 layer sets')
input = torch.flatten(input, start_dim = 1)
input = func.relu(self.dense(input))
input = func.softmax(self.decision(input), dim = 1)
return input
class Unlimited(nn.Module):
"""
Defining a CNN model treating input as 1D data
with given hyperparameters
"""
def __init__(self, id, param, n_class = 2):
# Initialization
super().__init__()
self.id = id
self.model_type = 'CNN_1D_Unlimited'
self.num_layers = param['num_layers']
self.loss_func = nn.CrossEntropyLoss()
self.conv = nn.Conv1d(
1,
param['conv_kernel_num'],
param['conv_kernel_size']
)
self.convMore = nn.Conv1d(
param['conv_kernel_num'],
param['conv_kernel_num'],
param['conv_kernel_size']
)
self.pool = nn.MaxPool1d(param['maxpool_kernel_size'])
### Trying to avoid Lazy module since it's under development ###
### But so far it's working just fine, so still using lazy module ###
# flattenLength = int(featureNum / pow(maxpool_kernel_size, num_layers))
# self.dense = nn.Linear(flattenLength, densed_size)
### -------------------------------------------------------- ###
self.dense = nn.LazyLinear(param['densed_size'])
self.decision = nn.Linear(param['densed_size'], n_class)
self.optimizer = optim.SGD(self.parameters(), param['learning_rate'])
# Overwrite the forward function in nn.Module
def forward(self, input):
input = self.pool(func.relu(self.conv(input)))
for i in range(self.num_layers - 1):
input = self.pool(func.relu(self.convMore(input)))
input = torch.flatten(input, start_dim = 1)
input = func.relu(self.dense(input))
input = func.softmax(self.decision(input), dim = 1)
return input
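# A hedged usage sketch (comments only); the parameter values mirror the
# 'CNN_1D' entries in the sample config, and the 1000-feature input length is
# an illustrative assumption:
#   param = {
#       'conv_kernel_size': 32,
#       'conv_kernel_num': 32,
#       'maxpool_kernel_size': 2,
#       'densed_size': 64,
#       'num_layers': 2,
#       'learning_rate': 0.1
#   }
#   model = Limited(id = 'demo', param = param)
#   out = model(torch.rand((5, 1, 1000)))   # (batch, 1 channel, num_features)
#   # out has shape (5, 2): softmax scores over the two classes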
class Make(classifier.Make_Template):
"""
    Analyze the performances of CNN-based approaches
with different hyperparameters
Find the top settings to build CNN
"""
# Perform classifier training process for given times
def train(self, dataSets, test_split_set):
testData = classifier.reshape_tensor(dataSets.dataTest)
testLabel = dataSets.labelTest
num_features = len(dataSets.dataTest[0])
for id in self.configs:
if self.configs[id]['config']['num_layers'] < 3:
model = Limited(id, self.configs[id]['config'])
else:
model = Unlimited(id, self.configs[id]['config'])
epoch = self.configs[id]['epoch']
batch_size = self.configs[id]['batch_size']
self._train_torch(epoch, batch_size, model, dataSets)
accuracy = self._evaluate_torch(
model,
testData,
testLabel,
test_split_set
)
            self.models.append([model, accuracy])

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/cnn_1d.py | cnn_1d.py
import torch
import difflib
import itertools
from torch.utils.data import DataLoader
from sklearn.naive_bayes import GaussianNB
# Cast input data into tensor format
# Then reshape the data in format of [#data, 1 (only 1 channel), len(data)]
def reshape_tensor(data):
return torch.reshape(
torch.tensor(data, dtype = torch.float),
(len(data), 1, len(data[0]))
)
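# Example (comments only): three samples with four features each
#   reshape_tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
#   # -> float tensor of shape (3, 1, 4), i.e. [#data, 1 channel, #features]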
class Error(Exception):
"""
Classifier related error handling
"""
pass
class Sklearn_Template:
"""
Build up sklearn-style general classifier based on given parameters
Gaussian Naive Bayes is used as example here
"""
def __init__(self, id, param):
super(Sklearn_Template, self).__init__()
self.id = id
self.param = param
self.initial()
def train(self, dataTrain = None, labelTrain = None):
self.clf.fit(dataTrain, labelTrain)
# Set clf to default level
    def initial(self): self.clf = GaussianNB(**self.param)
class Make_Template:
"""
    Analyze the performances of models with different hyperparameters
Find the top settings to build models
"""
def __init__(self, config = None, cpu_mode = False):
super(Make_Template, self).__init__()
self.models = []
self.configs = config
self.cpu_mode = cpu_mode
# Perform classifier training process for given times
# and keep given ratio of top performing classifiers
def train(self, dataSets, clf_keep_ratio, clf_accuracy_thread):
return self
# Filter models based on checking accuracy (or ranking)
def _filter_models(self, clf_keep_ratio = None, clf_accuracy_thread = None):
# nothing to do
if ((clf_keep_ratio is None and clf_accuracy_thread is None)
or (len(self.models) <= 1)): return
# Or we do the job
accuracy_thread = None
self.models.sort(key = lambda x:x[-1], reverse = True)
if clf_keep_ratio is not None:
index = int(len(self.models) * clf_keep_ratio) - 1
accuracy_thread = self.models[index][-1]
if clf_accuracy_thread is not None:
if accuracy_thread is None:
accuracy_thread = clf_accuracy_thread
else:
accuracy_thread = min(accuracy_thread, clf_accuracy_thread)
# now we partition
if accuracy_thread > self.models[0][-1]:
print('accuracy_thread is too high! Returning the best we can get')
accuracy_thread = self.models[0][-1]
print('accuracy_thread is set to:', accuracy_thread)
low_bound = len(self.models)
for i in reversed(range(low_bound)):
accuracy = self.models[i][-1]
if accuracy >= accuracy_thread:break
low_bound -= 1
self.models = self.models[:low_bound]
return
# generalized pytorch model training process
    def _train_torch(self, epoch, batch_size, model, dataSets):
        if self.cpu_mode or not torch.cuda.is_available():
            device = torch.device('cpu')
        else:
            device = torch.device('cuda')
        # keep handles to optimizer and loss function before any wrapping,
        # then pass model to device once (not per batch); wrapping with
        # DataParallel inside the batch loop would nest wrappers and hide
        # model.optimizer / model.loss_func
        optimizer = model.optimizer
        loss_func = model.loss_func
        model.to(device)
        if device.type == 'cuda' and torch.cuda.device_count() > 1:
            model = torch.nn.DataParallel(model)
        for ep in range(epoch):
            index_set = DataLoader(
                dataset = range(len(dataSets.dataTrain)),
                batch_size = batch_size,
                shuffle = True
            )
            for index in index_set:
                index = index.tolist()
                data = [dataSets.dataTrain[i] for i in index]
                label = [dataSets.labelTrain[i] for i in index]
                batch_data = reshape_tensor(data).to(device)
                batch_label = torch.tensor(label).to(device)
                # set model to training mode
                model.train()
                if optimizer is not None: optimizer.zero_grad()
                outputs = model(batch_data)
                loss = loss_func(outputs, batch_label)
                loss.backward()
                if optimizer is not None: optimizer.step()
# Evaluate the accuracy of given model with testing data
    def _evaluate_torch(self, model, testData, testLabel, do_test):
        accuracy = None
        if do_test:
            model.eval()
            with torch.no_grad():
                # move test data onto the same device as the model
                device = next(model.parameters()).device
                outputs = model(testData.to(device))
                correct = 0
                for i in range(len(outputs)):
                    if outputs[i][0] > outputs[i][1]: predict = 0
                    else: predict = 1
                    if predict == testLabel[i]: correct += 1
                accuracy = correct / len(testLabel)
        return accuracy
# Evaluate the accuracy of given sklearn style model with testing data
def _evaluate_sklearn(self, model, testData, testLabel, do_test):
accuracy = None
if do_test:
accuracy = difflib.SequenceMatcher(
None,
model.clf.predict(testData),
testLabel
).ratio()
return accuracy
# stop epoch when no improvement on loss
def _early_stopping(self,):
        print('under construction')

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/__init__.py | __init__.py
import torch
import torch.nn as nn
import torch.optim as optim
import ageas.classifier as classifier
class GRU(nn.Module):
def __init__(self,
id,
input_size,
num_layers,
hidden_size,
dropout,
learning_rate,
n_class = 2
):
super(GRU, self).__init__()
self.id = id
self.model_type = 'GRU'
self.num_layers = num_layers
self.hidden_size = hidden_size
self.dropout = nn.Dropout(p = dropout)
self.gru = nn.GRU(
input_size,
self.hidden_size,
self.num_layers,
batch_first = True
)
self.fc = nn.Linear(self.hidden_size, n_class)
self.optimizer = torch.optim.Adam(self.parameters(), lr = learning_rate)
self.loss_func = nn.CrossEntropyLoss()
def forward(self, input):
input = self.dropout(input)
        # Set initial hidden states
        # input needs to be: (batch_size, seq, input_size)
        # (h0 created on the input's device to avoid a CPU/GPU mismatch)
        h0 = torch.zeros(
            self.num_layers, input.size(0), self.hidden_size, device = input.device
        )
out, _ = self.gru(input, h0)
# out: tensor of shape (batch_size, seq_length, hidden_size)
# Decode the hidden state of the last time step
out = self.fc(out[:, -1, :])
return out
class Make(classifier.Make_Template):
"""
    Analyze the performances of GRU-based approaches
with different hyperparameters
Find the top settings to build GRU
"""
# Perform classifier training process for given times
def train(self, dataSets, test_split_set):
testData = classifier.reshape_tensor(dataSets.dataTest)
testLabel = dataSets.labelTest
num_features = len(dataSets.dataTest[0])
for id in self.configs:
model = GRU(id, num_features, **self.configs[id]['config'])
epoch = self.configs[id]['epoch']
batch_size = self.configs[id]['batch_size']
self._train_torch(epoch, batch_size, model, dataSets)
accuracy = self._evaluate_torch(
model,
testData,
testLabel,
test_split_set
)
            self.models.append([model, accuracy])

| Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/classifier/gru.py | gru.py
import itertools
import ageas.lib as lib
from collections import deque
import ageas.tool.json as json
# Expand the default list_config into concrete per-model configurations
def List_Config_Reader(path):
config_list = json.decode(path)
result = {}
if 'Transformer' in config_list:
result['Transformer'] = Pytorch_Transformer(
header = 'pytorch_transformer_',
config = config_list['Transformer']
).configs
if 'CNN_1D' in config_list:
result['CNN_1D'] = Pytorch_CNN_1D(
header = 'pytorch_cnn_1d_',
config = config_list['CNN_1D']
).configs
if 'CNN_Hybrid' in config_list:
result['CNN_Hybrid'] = Pytorch_CNN_Hybrid(
header = 'pytorch_cnn_hybrid_',
config = config_list['CNN_Hybrid']
).configs
if 'RNN' in config_list:
result['RNN'] = Pytorch_RNN(
header = 'pytorch_rnn_',
config = config_list['RNN']
).configs
if 'GRU' in config_list:
result['GRU'] = Pytorch_GRU(
header = 'pytorch_gru_',
config = config_list['GRU']
).configs
if 'LSTM' in config_list:
result['LSTM'] = Pytorch_LSTM(
header = 'pytorch_lstm_',
config = config_list['LSTM']
).configs
if 'SVM' in config_list:
result['SVM'] = Sklearn_SVM(
header = 'sklearn_svc_',
config = config_list['SVM']
).configs
if 'RFC' in config_list:
result['RFC'] = Sklearn_RFC(
header = 'sklearn_rfc_',
config = config_list['RFC']
).configs
if 'GNB' in config_list:
result['GNB'] = Sklearn_GNB(
header = 'sklearn_gnb_',
config = config_list['GNB']
).configs
if 'Logit' in config_list:
result['Logit'] = Sklearn_Logit(
header = 'sklearn_logit_',
config = config_list['Logit']
).configs
if 'GBM' in config_list:
result['GBM'] = XGBoost_GBM(
header = 'xgboost_gbm_',
config = config_list['GBM']
).configs
return result
class Sklearn_SVM(lib.Config_Maker_Template):
"""
config maker for sklearn based SVMs
"""
def __init__(self, header = None, config = None):
self.header = header
deq = deque()
combs = list(itertools.product(*config.values()))
for ele in combs:
assert len(config.keys()) == len(ele)
temp = {}
for i in range(len(ele)):
key = list(config.keys())[i]
value = ele[i]
temp[key] = value
temp = self.__verify_config(temp)
if temp is not None:
record = {'config':temp}
if record not in deq: deq.appendleft(record)
self.configs = {self.header + str(i) : deq[i] for i in range(len(deq))}
# verify a SVC config before adding it to set
def __verify_config(self, query):
if 'kernel' in query and query['kernel'] != 'poly':
if 'degree' in query:
query['degree'] = 0
return query
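# Illustrative sketch (comments only): given a hypothetical grid such as
#   {'kernel': ['linear'], 'gamma': ['scale', 'auto'], 'C': [1.0],
#    'degree': [3], 'cache_size': [500], 'probability': [True]}
# Sklearn_SVM(header = 'sklearn_svc_', config = grid).configs yields entries
# shaped like those in sample_config.js, e.g.
#   {'sklearn_svc_0': {'config': {'kernel': 'linear', 'gamma': 'auto',
#                                 'C': 1.0, 'degree': 0, 'cache_size': 500,
#                                 'probability': True}}, ...}
# where 'degree' is zeroed because the kernel is not 'poly'.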
class Sklearn_Logit(lib.Config_Maker_Template):
"""
config maker for sklearn based Logistic Regressions
Note:
https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html?highlight=logistic%20regression#sklearn.linear_model.LogisticRegression
"""
def __init__(self, header = None, config = None):
self.header = header
deq = deque()
combs = list(itertools.product(*config.values()))
for ele in combs:
assert len(config.keys()) == len(ele)
temp = {}
for i in range(len(ele)):
key = list(config.keys())[i]
value = ele[i]
temp[key] = value
temp = self.__verify_config(temp)
if temp is not None:
record = {'config':temp}
if record not in deq: deq.appendleft(record)
self.configs = {self.header + str(i) : deq[i] for i in range(len(deq))}
def __verify_config(self, query):
if 'solver' in query and 'penalty' in query:
if query['solver'] == 'newton-cg':
if query['penalty'] != 'l2' and query['penalty'] != 'none':
return None
elif query['solver'] == 'lbfgs':
if query['penalty'] != 'l2' and query['penalty'] != 'none':
return None
elif query['solver'] == 'liblinear':
if query['penalty'] != 'l2' and query['penalty'] != 'l1':
return None
elif query['solver'] == 'sag':
if query['penalty'] != 'l2' and query['penalty'] != 'none':
return None
return query
class Sklearn_GNB(lib.Config_Maker_Template):
"""
config maker for sklearn based GNBs
"""
def __init__(self, header = None, config = None):
self.header = header
deq = deque()
combs = list(itertools.product(*config.values()))
for ele in combs:
assert len(config.keys()) == len(ele)
temp = {}
for i in range(len(ele)):
key = list(config.keys())[i]
value = ele[i]
temp[key] = value
if temp is not None:
record = {'config':temp}
if record not in deq: deq.appendleft(record)
self.configs = {self.header + str(i) : deq[i] for i in range(len(deq))}
class Sklearn_RFC(Sklearn_GNB):
"""
config maker for sklearn based RFs
"""
def __verify_config(self, query):
return query
class XGBoost_GBM(lib.Config_Maker_Template):
"""
config maker for XGBoost based GBMs
"""
def __init__(self, header = None, config = None):
self.header = header
deq = deque()
combs = list(itertools.product(*config.values()))
for ele in combs:
assert len(config.keys()) == len(ele)
temp = {}
for i in range(len(ele)):
key = list(config.keys())[i]
value = ele[i]
temp[key] = value
temp = self.__verify_config(temp)
if temp is not None:
record = {'config':temp}
if record not in deq: deq.appendleft(record)
self.configs = {self.header + str(i) : deq[i] for i in range(len(deq))}
# verify a GBM config before adding it to set
def __verify_config(self, query):
if 'objective' in query and query['objective'] == 'multi:softmax':
query['num_class'] = 2
return query
class Pytorch_CNN_Hybrid(lib.Config_Maker_Template):
"""
config maker for Pytorch based Hybrid-CNN
"""
def __init__(self, header = None, config = None):
self.epoch = 1
self.batch_size = None
self.header = header
config = self.__resize_config(config)
self.configs = self.__get_configs(config)
    # Generate all possible hyperparameter combinations
    # Check model config file for order of parameters
def __get_configs(self, config):
deq = deque()
combs = list(itertools.product(*config.values()))
for ele in combs:
assert len(config.keys()) == len(ele)
temp = {}
for i in range(len(ele)):
key = list(config.keys())[i]
value = ele[i]
temp[key] = value
temp = self.__verify_config(temp)
if temp is not None:
                # add epoch and batch_size to each record
for epoch in self.epoch:
for batch_size in self.batch_size:
record = {
'config':temp,
'epoch':epoch,
'batch_size':batch_size
}
if record not in deq: deq.appendleft(record)
return {self.header + str(i) : deq[i] for i in range(len(deq))}
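    # Illustrative sketch of the expansion above (hypothetical values):
    # given config = {'epoch': [5], 'batch_size': [16, 32], 'lr': [0.01, 0.1]},
    # __resize_config() pops epoch/batch_size, __get_configs() builds the two
    # lr combinations, and each is paired with every epoch/batch_size pair,
    # yielding four records such as
    # {'config': {'lr': 0.01}, 'epoch': 5, 'batch_size': 16}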
# this should vary with different classes
def __resize_config(self, query):
try:
self.epoch = query['epoch']
self.batch_size = query['batch_size']
del query['epoch']
del query['batch_size']
        except Exception:
            # epoch and batch_size are mandatory for PyTorch based models
            raise
return query
# verify a hybrid CNN config before adding it to set
def __verify_config(self, query):
return query
class Pytorch_CNN_1D(Pytorch_CNN_Hybrid):
"""
config maker for Pytorch based 1D-CNN
"""
# verify a 1D CNN config before adding it to set
def __verify_config(self, query):
return query
class Pytorch_RNN(Pytorch_CNN_Hybrid):
"""
config maker for Pytorch based RNN
"""
def __verify_config(self, query):
return query
class Pytorch_LSTM(Pytorch_CNN_Hybrid):
"""
    config maker for Pytorch based LSTM
"""
def __verify_config(self, query):
return query
class Pytorch_GRU(Pytorch_CNN_Hybrid):
"""
    config maker for Pytorch based GRU
"""
def __verify_config(self, query):
return query
class Pytorch_Transformer(Pytorch_CNN_Hybrid):
"""
config maker for Pytorch based Transformer
"""
def __verify_config(self, query):
return query
""" For test """
if __name__ == "__main__":
path = "../data/config/list_config.js"
result = List_Config_Reader(path)
json.encode(result, 'sample_config.js') | Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/lib/config_maker.py | config_maker.py |
import os
import copy
import statistics as sta
import ageas.tool as tool
import ageas.tool.grn as grn
import ageas.tool.gem as gem
import ageas.tool.json as json
from scipy.stats import pearsonr
class Make:
"""
    Make GRNs for gene expression data
"""
def __init__(self,
database_info = None,
std_value_thread = None,
std_ratio_thread = None,
correlation_thread = 0.2,
gem_data = None,
meta_grn = None,
load_path = None
):
super(Make, self).__init__()
# Initialize
self.database_info = database_info
self.std_value_thread = std_value_thread
self.std_ratio_thread = std_ratio_thread
self.correlation_thread = correlation_thread
if self.correlation_thread is None: self.correlation_thread = 0
# load in
if load_path is not None:
            self.class1_psGRNs, self.class2_psGRNs = self.__load_psGRNs(
                load_path
            )
# Make GRNs
else:
self.class1_psGRNs, self.class2_psGRNs = self.__make_psGRNs(
gem_data = gem_data,
meta_grn = meta_grn
)
# main controller to cast pseudo cell GRNs (psGRNs)
def __make_psGRNs(self, gem_data, meta_grn):
if gem_data is not None:
class1_psGRNs = self.__loaded_gem_method(
class_type = 'class1',
gem = gem_data.class1,
meta_grn = meta_grn
)
class2_psGRNs = self.__loaded_gem_method(
class_type = 'class2',
gem = gem_data.class2,
meta_grn = meta_grn
)
elif self.database_info.type == 'gem_folders':
class1_psGRNs = self.__folder_method(
'class1',
self.database_info.class1_path,
meta_grn
)
class2_psGRNs = self.__folder_method(
'class2',
self.database_info.class2_path,
meta_grn
)
elif self.database_info.type == 'gem_files':
# need to revise here!
class1_psGRNs = self.__file_method(
'class1',
self.database_info.class1_path,
meta_grn
)
class2_psGRNs = self.__file_method(
'class2',
self.database_info.class2_path,
meta_grn
)
else:
raise tool.Error('psGRN Caster Error: Unsupported database type')
return class1_psGRNs, class2_psGRNs
# as named
def __file_method(self, class_type, path, meta_grn):
psGRNs = dict()
print('psgrn_caster.py:class Make: need to do something here')
return psGRNs
# as named
def __loaded_gem_method(self, class_type, gem, meta_grn):
psGRNs = dict()
sample_num = 0
start = 0
end = self.database_info.sliding_window_size
# set stride
if self.database_info.sliding_window_stride is not None:
stride = self.database_info.sliding_window_stride
else:
stride = end
        # use sliding window technique to set pseudo samples
loop = True
while loop:
if start >= len(gem.columns):
break
elif end >= len(gem.columns):
end = len(gem.columns)
loop = False
sample_id = 'sample' + str(sample_num)
sample = gem.iloc[:, start:end]
if meta_grn is not None:
pseudo_sample = self.__process_sample_with_metaGRN(
class_type,
sample,
sample_id,
meta_grn
)
else:
pseudo_sample = self.__process_sample_without_metaGRN(
class_type,
sample,
sample_id
)
# Save data into psGRNs
psGRNs[sample_id] = pseudo_sample
start += stride
end += stride
sample_num += 1
return psGRNs
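    # Worked example of the sliding window above (numbers are hypothetical):
    # with 23 cells, sliding_window_size = 10 and sliding_window_stride = 5,
    # pseudo samples are built from columns [0:10), [5:15), [10:20) and
    # [15:23); when the stride is unset it defaults to the window size, so
    # windows do not overlap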
# as named
def __folder_method(self, class_type, path, meta_grn):
data = self.__readin_folder(path)
psGRNs = dict()
for sample_id in data:
if meta_grn is not None:
pseudo_sample = self.__process_sample_with_metaGRN(
class_type,
data[sample_id],
                    sample_id,
meta_grn
)
else:
pseudo_sample = self.__process_sample_without_metaGRN(
class_type,
data[sample_id],
                    sample_id
)
# Save data into psGRNs
psGRNs[sample_id] = pseudo_sample
return psGRNs
# as named
def __process_sample_with_metaGRN(self,
class_type,
gem,
sample_id,
meta_grn
):
pseudo_sample = grn.GRN(id = sample_id)
for grp in meta_grn.grps:
source_ID = meta_grn.grps[grp].regulatory_source
target_ID = meta_grn.grps[grp].regulatory_target
try:
source_exp = list(gem.loc[[source_ID]].values[0])
target_exp = list(gem.loc[[target_ID]].values[0])
            except Exception:
continue
# No need to compute if one array is constant
if len(set(source_exp)) == 1 or len(set(target_exp)) == 1:
continue
cor = pearsonr(source_exp, target_exp)[0]
if abs(cor) > self.correlation_thread:
if grp not in pseudo_sample.grps:
pseudo_sample.add_grp(
id = grp,
source = source_ID,
target = target_ID,
correlations = {class_type: cor}
)
if source_ID not in pseudo_sample.genes:
pseudo_sample.genes[source_ID] = copy.deepcopy(
meta_grn.genes[source_ID]
)
pseudo_sample.genes[source_ID].expression_mean = {
class_type: float(sta.mean(source_exp))
}
if target_ID not in pseudo_sample.genes:
pseudo_sample.genes[target_ID] = copy.deepcopy(
meta_grn.genes[target_ID]
)
pseudo_sample.genes[target_ID].expression_mean = {
class_type: float(sta.mean(target_exp))
}
return pseudo_sample
# Process data without guidance
# May need to revise later
def __process_sample_without_metaGRN(self, class_type, gem, sample_id):
pseudo_sample = grn.GRN(id = sample_id)
# Get source TF
for source_ID in gem.index:
# Get target gene
for target_ID in gem.index:
if source_ID == target_ID:
continue
grp_id = grn.GRP(source_ID, target_ID).id
if grp_id not in pseudo_sample.grps:
# No need to compute if one array is constant
                    source_exp = list(gem.loc[[source_ID]].values[0])
                    target_exp = list(gem.loc[[target_ID]].values[0])
if len(set(source_exp)) == 1 or len(set(target_exp)) == 1:
continue
cor = pearsonr(source_exp, target_exp)[0]
if abs(cor) > self.correlation_thread:
                        if grp_id not in pseudo_sample.grps:
pseudo_sample.add_grp(
id = grp_id,
source = source_ID,
target = target_ID,
correlations = {class_type: cor}
)
if source_ID not in pseudo_sample.genes:
pseudo_sample.genes[source_ID] = grn.Gene(
id = source_ID,
expression_mean = {
class_type: float(sta.mean(source_exp))
}
)
if target_ID not in pseudo_sample.genes:
pseudo_sample.genes[target_ID] = grn.Gene(
id = target_ID,
expression_mean = {
class_type: float(sta.mean(target_exp))
}
)
                    else:
                        # mark low-correlation pair as checked so it is not
                        # recomputed for the reverse direction
                        pseudo_sample.grps[grp_id] = None
return pseudo_sample
    # Read in Gene Expression Matrices in given class path
def __readin_folder(self, path):
result = dict()
for filename in os.listdir(path):
filename = path + '/' + filename
# read in GEM files
temp = gem.Reader(filename, header = 0, index_col = 0)
temp.STD_Filter(
std_value_thread = self.std_value_thread,
std_ratio_thread = self.std_ratio_thread
)
result[filename] = temp.data
return result
# as named
def update_with_remove_list(self, remove_list):
for sample in self.class1_psGRNs:
for id in remove_list:
if id in self.class1_psGRNs[sample].grps:
del self.class1_psGRNs[sample].grps[id]
for sample in self.class2_psGRNs:
for id in remove_list:
if id in self.class2_psGRNs[sample].grps:
del self.class2_psGRNs[sample].grps[id]
return
    # temporary psGRN saving method
""" need to be revised later to save psGRNs file by file"""
def save(self, save_path):
json.encode(
{
'class1':{k:v.as_dict() for k,v in self.class1_psGRNs.items()},
'class2':{k:v.as_dict() for k,v in self.class2_psGRNs.items()}
},
save_path
)
return
# load in psGRNs from files
""" need to be revised later with save_psGRNs"""
def __load_psGRNs(self, load_path):
data = json.decode(load_path)
class1_psGRNs = dict()
class2_psGRNs = dict()
for k,v in data['class1'].items():
temp = grn.GRN(id = k)
temp.load_dict(dict = v)
class1_psGRNs[k] = temp
for k,v in data['class2'].items():
temp = grn.GRN(id = k)
temp.load_dict(dict = v)
class2_psGRNs[k] = temp
return class1_psGRNs, class2_psGRNs
# OLD: Save GRN files as js.gz in new folder
# def save_GRN(self, data, save_path):
# for sample in data:
# names = sample.strip().split('/')
# name = names[-1].split('.')[0] + '.js'
# path = '/'.join(names[:-3] + [save_path, names[-2], name])
# # Make dir if dir not exists
# folder = os.path.dirname(path)
# if not os.path.exists(folder):
# os.makedirs(folder)
# # Get GRN and save it
# grn = data[sample]
# json.encode(grn, out = path) | Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/lib/psgrn_caster.py | psgrn_caster.py |
import math
import copy
import itertools
import pandas as pd
import ageas.lib as lib
import ageas.tool.grn as grn
from collections import OrderedDict
GRP_TYPES = ['Standard', 'Outer', 'Bridge', 'Mix']
class Extract(object):
"""
    Extract Atlas of key Regulons from the most important GRPs
"""
def __init__(self,
correlation_thread:float = 0.2,
grp_importances = None,
score_thread = None,
outlier_grps = {},
top_grp_amount = None
):
super(Extract, self).__init__()
self.regulons = list()
self.regulatory_sources = None
self.grps = grp_importances
self.outlier_grps = outlier_grps
self.correlation_thread = correlation_thread
        # will be deleted after being recorded in self.regulons
if grp_importances is not None:
self.top_grps=grp_importances.stratify(score_thread, top_grp_amount)
# as named
def change_regulon_list_to_dict(self, header = 'regulon_'):
self.regulons = {header + str(i):e for i, e in enumerate(self.regulons)}
# as named
def build_regulon(self, meta_grn, impact_depth = 3):
# set outlier bonus score to max score of standard GRPs
outlier_bonus_score = self.top_grps.iloc[0]['importance']
# process standard grps
for id in self.top_grps.index:
try:
grp = meta_grn.grps[id]
except Exception:
raise lib.Error('GRP', id, 'not in Meta GRN')
grp.type = GRP_TYPES[0]
grp.score = self.top_grps.loc[id]['importance']
self.update_regulon_with_grp(grp, meta_grn)
del self.top_grps
for id in self.outlier_grps:
try:
grp = meta_grn.grps[id]
except Exception:
raise lib.Error('GRP', id, 'not in Meta GRN')
grp.type = GRP_TYPES[1]
grp.score = self.outlier_grps[id] + outlier_bonus_score
self.update_regulon_with_grp(grp, meta_grn)
# del self.outlier_grps
        # link regulons if a bridge can be built with factors already in regulons
self.find_bridges(meta_grn = meta_grn)
self.update_genes(impact_depth = impact_depth)
# combine regulons if sharing common genes
def find_bridges(self, meta_grn):
i = 0
j = 1
checked_grps = {}
while True:
if i == len(self.regulons) or j == len(self.regulons): break
combining = False
reg1_genes = self.regulons[i].genes.keys()
reg2_genes = self.regulons[j].genes.keys()
assert len([x for x in reg1_genes if x in reg2_genes]) == 0
for comb in list(itertools.product(reg1_genes, reg2_genes)):
id = grn.GRP(comb[0], comb[1]).id
if id not in checked_grps:
checked_grps[id] = None
if id in meta_grn.grps:
if not combining: combining = True
assert id not in self.regulons[i].grps
meta_grn.grps[id].type = GRP_TYPES[2]
meta_grn.grps[id].score = 0
self.regulons[i].grps[id] = meta_grn.grps[id]
if combining:
self.__combine_regulons(ind_1 = i, ind_2 = j)
del self.regulons[j]
j = i + 1
else:
j += 1
if j == len(self.regulons):
i += 1
j = i + 1
    # update gene source/target lists in each regulon
    # and collect regulatory sources for each gene
def update_genes(self, impact_depth):
for regulon in self.regulons:
for grp in regulon.grps.values():
source = grp.regulatory_source
target = grp.regulatory_target
self.__update_regulon_gene_list(
source = source,
target = target,
gene_list = regulon.genes,
reversable = grp.reversable
)
self.regulatory_sources = self.__get_reg_sources(impact_depth)
# Use extra GRPs from meta GRN to link different Regulons
def link_regulon(self,
meta_grn = None,
allowrance:int = 1,
correlation_thread:float = 0.2
):
# initialize
grp_skip_list = {}
for regulon in self.regulons:
for grp_id in regulon.grps:
grp_skip_list[grp_id] = None
combine_list = []
for gene in self.regulatory_sources:
self.__find_bridges_by_gene(
gene,
self.regulatory_sources[gene]['regulon_id'],
meta_grn,
allowrance,
grp_skip_list,
combine_list,
[]
)
for comb in combine_list:
assert len(comb[0]) >= 2
extend_regulon = self.regulons[comb[0][0]]
for i in range(1, len(comb[0])):
self.__combine_regulons(ind_1 = comb[0][0], ind_2 = comb[0][i])
self.regulons[comb[0][i]] = None
for grp_id in comb[1]:
# skip if already added
if grp_id in extend_regulon.grps: continue
# update GRP information and add it to regulon
meta_grn.grps[grp_id].type = GRP_TYPES[2]
meta_grn.grps[grp_id].score = 0
extend_regulon.grps[grp_id] = meta_grn.grps[grp_id]
# update gene list in regulon
source = meta_grn.grps[grp_id].regulatory_source
target = meta_grn.grps[grp_id].regulatory_target
if source not in extend_regulon.genes:
extend_regulon.genes[source] = copy.deepcopy(
meta_grn.genes[source]
)
if target not in extend_regulon.genes:
extend_regulon.genes[target] = copy.deepcopy(
meta_grn.genes[target]
)
self.__update_regulon_gene_list(
source = source,
target = target,
gene_list = extend_regulon.genes,
reversable = meta_grn.grps[grp_id].reversable
)
self.regulons = [e for e in self.regulons if e is not None]
self.regulatory_sources = self.__get_reg_sources()
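    # Note on 'allowrance' above: it is the number of intermediate genes a
    # bridge path may pass through. With allowrance = 1, a source gene of
    # regulon A may reach a source gene of regulon B through one extra gene
    # (A_src -> mid -> B_src); with allowrance = 0 only direct GRPs qualify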
    # find factors by checking regulatory target number and impact score
def report(self, meta_grn, impact_score_thread = 0):
df = []
for k, v in self.regulatory_sources.items():
if v['impact_score'] >= impact_score_thread:
v['regulon_id'] = 'regulon_' + str(v['regulon_id'])
exps = meta_grn.genes[k].expression_mean
fc = abs(math.log2( (exps['class1']+1) / (exps['class2']+1) ))
df.append([k] + list(v.values()) + [fc])
df = pd.DataFrame(sorted(df, key=lambda x:x[-1], reverse = True))
df.columns = [
'Gene',
'Regulon',
'Type',
'Source_Num',
'Target_Num',
'Impact_Score',
'LogFC'
]
return df
# recursively add up impact score with GRP linking gene to its target
def __get_impact_genes(self,
regulon = None,
gene:str = None,
depth:int = 3,
dict = None,
prev_cor:float = 1.0,
):
if depth > 0:
depth -= 1
for target in regulon.genes[gene].target:
if len(regulon.genes[target].target) > 0:
# get regulatory correlation strength
link_grp = grn.GRP(gene, target).id
cors = regulon.grps[link_grp].correlations
if cors['class1'] == 0.0 or cors['class2'] == 0.0:
                        cor = max(abs(cors['class1']), abs(cors['class2']))
elif cors['class1'] != 0.0 and cors['class2'] != 0.0:
cor = (abs(cors['class1']) + abs(cors['class2'])) / 2
else:
raise lib.Error(link_grp, 'Cor in meta GRN is creepy')
# count it if passing conditions
if (target not in dict and
regulon.genes[target].type != GRP_TYPES[2] and
cor * prev_cor > self.correlation_thread):
dict[target] = cor * prev_cor
dict = self.__get_impact_genes(
regulon,
target,
depth,
dict,
cor * prev_cor
)
return dict
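    # Worked example for the recursion above (correlations are hypothetical):
    # if A regulates B with strength 0.8 and B regulates C with strength 0.5,
    # and both B and C have downstream targets, then starting from A with
    # correlation_thread = 0.2, B is counted with 0.8 and C with
    # 0.8 * 0.5 = 0.4; genes whose cumulative strength falls below the thread
    # are not counted and the path is not extended through them. The
    # impact_score reported later is simply the number of genes reached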
# combine regulons in self.regulons by index
def __combine_regulons(self, ind_1, ind_2):
self.regulons[ind_1].grps.update(self.regulons[ind_2].grps)
self.regulons[ind_1].genes.update(self.regulons[ind_2].genes)
# summarize key regulatory sources appearing in regulons
def __get_reg_sources(self, impact_depth = 3):
dict = {}
for regulon_id, regulon in enumerate(self.regulons):
for gene in regulon.genes:
source_num = len(regulon.genes[gene].source)
target_num = len(regulon.genes[gene].target)
if (gene not in dict and
# regulon['genes'][gene]['type'] != GRP_TYPES[2] and
target_num >= 1):
impact = self.__get_impact_genes(
self.regulons[regulon_id],
gene = gene,
depth = impact_depth,
dict = {},
prev_cor = 1.0
)
dict[gene]= {
'regulon_id': regulon_id,
'type': regulon.genes[gene].type,
'source_num': source_num,
'target_num': target_num,
'impact_score': len(impact)
}
elif gene in dict:
raise lib.Error('Repeated Gene in regulons', gene)
        # sort regulatory sources by number of regulatory targets
return OrderedDict(sorted(
dict.items(),
key = lambda x:x[-1]['target_num'],
reverse = True
))
# Find potential bridge GRPs with specific gene to link 2 regulons
def __find_bridges_by_gene(self,
gene,
from_regulon,
meta_grn,
allowrance,
grp_skip_list,
combine_list,
prev_grps
):
        # last round: attempt to find a direct bridge GRP
if allowrance == 0:
for anchor, record in self.regulatory_sources.items():
if record['regulon_id'] == from_regulon: continue
# presume GRP which could link regulons
grp_id = grn.GRP(gene, anchor).id
if grp_id not in grp_skip_list and grp_id in meta_grn.grps:
anchor_reg_id = record['regulon_id']
prev_grps.append(grp_id)
# add grp to grp_skip_list
for id in prev_grps: grp_skip_list[id] = None
self.__update_combine_list(
reg_id1 = from_regulon,
reg_id2 = anchor_reg_id,
grp_ids = prev_grps,
combine_list = combine_list
)
elif allowrance > 0:
for grp_id, grp in meta_grn.grps.items():
if grp_id in grp_skip_list: continue
if grp.regulatory_source == gene:
new = grp.regulatory_target
elif grp.regulatory_target == gene:
new = grp.regulatory_source
else: continue
if (new in self.regulatory_sources and
self.regulatory_sources[new]['regulon_id'] != from_regulon):
anchor_reg_id = self.regulatory_sources[new]['regulon_id']
prev_grps.append(grp_id)
# add grp to grp_skip_list
for id in prev_grps: grp_skip_list[id] = None
self.__update_combine_list(
reg_id1 = from_regulon,
reg_id2 = anchor_reg_id,
grp_ids = prev_grps,
combine_list = combine_list
)
else:
prev_grps.append(grp_id)
self.__find_bridges_by_gene(
new,
from_regulon,
meta_grn,
allowrance - 1,
grp_skip_list,
combine_list,
prev_grps
)
else:
raise lib.Error('Reached a negative allowrance value')
# update combine_list if a GRP found can be the bridge between regulons
def __update_combine_list(self, reg_id1, reg_id2, grp_ids, combine_list):
# check action to perform
ind_1 = None
ind_2 = None
for index, ele in enumerate(combine_list):
# check which regulon set to add
if reg_id1 in ele[0]: ind_1 = index
if reg_id2 in ele[0]: ind_2 = index
if ind_1 is None and ind_2 is None:
combine_list.append([[reg_id1, reg_id2], [id for id in grp_ids]])
        # one of the regulons is already marked for combining
elif ind_1 is None and ind_2 is not None:
combine_list[ind_2][0].append(reg_id1)
combine_list[ind_2][1] += grp_ids
elif ind_1 is not None and ind_2 is None:
combine_list[ind_1][0].append(reg_id2)
combine_list[ind_1][1] += grp_ids
# both regulons already in combine list
elif ind_1 == ind_2:
combine_list[ind_1][1] += grp_ids
else:
combine_list[ind_1][1] += grp_ids
combine_list[ind_1][0] += combine_list[ind_2][0]
combine_list[ind_1][1] += combine_list[ind_2][1]
del combine_list[ind_2]
def __update_regulon_gene_list(self, source, target, gene_list, reversable):
assert source not in gene_list[target].source
assert source not in gene_list[target].target
assert target not in gene_list[source].source
assert target not in gene_list[source].target
gene_list[target].source.append(source)
gene_list[source].target.append(target)
if reversable:
gene_list[source].source.append(target)
gene_list[target].target.append(source)
def update_regulon_with_grp(self, grp, meta_grn):
update_ind = None
combine_ind = None
source_regulon_ind = None
target_regulon_ind = None
source = grp.regulatory_source
target = grp.regulatory_target
        # check whether GRP could be appended into an existing regulon
for i, regulon in enumerate(self.regulons):
if source in regulon.genes:
assert source_regulon_ind is None
source_regulon_ind = i
if target in regulon.genes:
assert target_regulon_ind is None
target_regulon_ind = i
if source_regulon_ind is None and target_regulon_ind is None:
# make new regulon data
regulon = grn.GRN()
regulon.grps[grp.id] = grp
regulon.genes[source] = copy.deepcopy(meta_grn.genes[source])
regulon.genes[target] = copy.deepcopy(meta_grn.genes[target])
self.regulons.append(regulon)
return
elif source_regulon_ind is not None and target_regulon_ind is not None:
if source_regulon_ind == target_regulon_ind:
update_ind = source_regulon_ind
# combine regulons if two are involved
elif source_regulon_ind != target_regulon_ind:
update_ind = source_regulon_ind
combine_ind = target_regulon_ind
elif source_regulon_ind is not None:
update_ind = source_regulon_ind
elif target_regulon_ind is not None:
update_ind = target_regulon_ind
else:
raise lib.Error('Something wrong with regulon updating process')
# update regulon if found destination
if update_ind is not None:
# append GRP into self.regulons[update_ind]
assert grp.id not in self.regulons[update_ind].grps
self.regulons[update_ind].grps[grp.id] = grp
# update gene list
if source not in self.regulons[update_ind].genes:
self.regulons[update_ind].genes[source] = copy.deepcopy(
meta_grn.genes[source]
)
elif target not in self.regulons[update_ind].genes:
self.regulons[update_ind].genes[target] = copy.deepcopy(
meta_grn.genes[target]
)
# combine 2 regulons if new GRP can connect two
if combine_ind is not None:
self.__combine_regulons(ind_1 = update_ind, ind_2 = combine_ind)
del self.regulons[combine_ind] | Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/lib/atlas_extractor.py | atlas_extractor.py |
import ageas.tool.grn as grn
from xgboost import XGBRegressor
from collections import OrderedDict
import ageas.lib as lib
class Predict:
"""
Predict regulation targets for source TFs
    Essentially, this is simulating how GRNBoost2 works
But thread can be automatically set by checking confirmed GRPs
"""
def __init__(self, gem_data, sample_grps = None, thread = None):
super(Predict, self).__init__()
assert sample_grps is not None or thread is not None
self.class1_gem = gem_data.class1
self.class2_gem = gem_data.class2
self.thread = 1 / len(gem_data.genes)
if thread is not None and thread != 'auto':
self.thread = float(thread)
elif thread == 'auto':
self.thread = self.__auto_set_thread(sample_grps)
else:
raise lib.Error('Predictor thread setting is wrong')
# Expand meta GRN by applying GRNBoost2-like algo on source TFs without
# documented targets
def expand_meta_grn(self, meta_grn, genes, correlation_thread):
for gene in genes:
class1FeatImpts, class2FeatImpts = self._getFeatureImportences(gene)
self.__update_grps_to_meta_grn(
meta_grn,
correlation_thread,
gene,
self.class1_gem.index,
class1FeatImpts,
)
self.__update_grps_to_meta_grn(
meta_grn,
correlation_thread,
gene,
self.class2_gem.index,
class2FeatImpts,
)
return meta_grn
    # decide whether to add GRPs associated with given gene into meta GRN
def __update_grps_to_meta_grn(self,
meta_grn,
correlation_thread,
gene,
gene_list,
feature_importances,
):
if feature_importances is None: return
for i in range(len(gene_list)):
tar = gene_list[i]
if feature_importances[i] > self.thread:
grp_id = grn.GRP(gene, tar).id
if grp_id not in meta_grn.grps:
meta_grn.update_grn(
source = gene,
target = tar,
gem1 = self.class1_gem,
gem2 = self.class2_gem,
correlation_thread = correlation_thread
)
return
# Automatically set prediction thread by tuning with sample GRPs
# Essentially this part is finding a regulatory source having most
# regulatory targets, and then find a confirmed GRP with minimum
# importance predicted by GBM based method
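    # Hypothetical example: with 1,000 genes the default thread is 0.001;
    # if the smallest non-zero GBM importance among genes in the chosen
    # source's confirmed GRPs is 0.0004, the thread is lowered to 0.0004
    # (the minimum of the two is returned)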
def __auto_set_thread(self, sample_grps):
regulatory_sources = {}
for grp in sample_grps:
source = sample_grps[grp].regulatory_source
if source not in regulatory_sources:
regulatory_sources[source] = [grp]
else:
regulatory_sources[source].append(grp)
        regulatory_sources = OrderedDict(sorted(
            regulatory_sources.items(), key = lambda x: len(x[1]), reverse = True
        ))
# Choose a key presenting in both classes
i = 0
for src in regulatory_sources:
if src in self.class1_gem.index and src in self.class2_gem.index:
break
if i == len(regulatory_sources) - 1:
return self.thread
i += 1
grps = regulatory_sources[src]
class1FeatImpts, class2FeatImpts = self._getFeatureImportences(src,True)
assert len(self.class1_gem.index) == len(class1FeatImpts)
assert len(self.class2_gem.index) == len(class2FeatImpts)
# Extract all genes influenced among regulon/GRPs
genes = {}
for grp_id in grps:
            factors = grp_id.split('_')
            assert len(factors) == 2
            if factors[0] not in genes: genes[factors[0]] = 0
            if factors[1] not in genes: genes[factors[1]] = 0
# assign feature importances to each gene in selected reg source's GRPs
for i in range(len(self.class1_gem.index)):
if self.class1_gem.index[i] in genes and class1FeatImpts[i] > 0:
genes[self.class1_gem.index[i]] = class1FeatImpts[i]
for i in range(len(self.class2_gem.index)):
gene = self.class2_gem.index[i]
if gene in genes and class2FeatImpts[i] > 0:
if genes[gene] == 0:
genes[gene] = class2FeatImpts[i]
else:
genes[gene] = (genes[gene] + class2FeatImpts[i]) / 2
# take out genes with 0 importances and reorder the dict
genes = {x:genes[x] for x in genes if genes[x] > 0}
genes = OrderedDict(sorted(genes.items(), key = lambda x: x[1]))
return min(genes[next(iter(genes))], self.thread)
    # Basically, this part mimics what GRNBoost2 does
def _getFeatureImportences(self, key, checked_in_gem = False):
if checked_in_gem or key in self.class1_gem.index:
            c1_result = self.__gbm_feature_importances_calculate(
self.class1_gem,
key
)
else: c1_result = None
if checked_in_gem or key in self.class2_gem.index:
            c2_result = self.__gbm_feature_importances_calculate(
self.class2_gem,
key
)
else:
c2_result = None
return c1_result, c2_result
# as named
    def __gbm_feature_importances_calculate(self, gem, key, random_state = 0):
gbm = XGBRegressor(random_state = random_state)
gbm.fit(gem.transpose(), list(gem.loc[[key]].values[0]))
return gbm.feature_importances_ | Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/lib/grp_predictor.py | grp_predictor.py |
import warnings
import math
import pandas as pd
from collections import Counter
import ageas.lib as lib
import ageas.tool as tool
import ageas.tool.grn as grn
import ageas.tool.json as json
import ageas.lib.grp_predictor as grp
class Analysis(object):
"""
Find important factors simply by GRN degree.
"""
def __init__(self, meta_grn, top = None):
super(Analysis, self).__init__()
self.top = top
temp = {}
for ele in meta_grn.grps:
source = meta_grn.grps[ele].regulatory_source
target = meta_grn.grps[ele].regulatory_target
if source not in temp:
temp[source] = 1
else:
temp[source] += 1
if target not in temp:
temp[target] = 1
else:
temp[target] += 1
if self.top is None: self.top = len(temp)
temp = [[k[0],
meta_grn.genes[k[0]].type,
k[1]] for k in Counter(temp).most_common(self.top)]
# adding log2FC
for ele in temp:
exp = meta_grn.genes[ele[0]].expression_mean
ele.append(abs(math.log2((exp['class1']+1) / (exp['class2']+1))))
# changing to dataframe type
self.result = pd.DataFrame(temp,columns=['ID','Type','Degree','Log2FC'])
def save(self, path):
self.result.to_csv(path, index = False )
class Cast:
"""
Cast Meta GRN based on GEMs
"""
def __init__(self,
gem_data = None,
prediction_thread = None,
correlation_thread = 0.2,
load_path = None
):
super(Cast, self).__init__()
# Initialization
self.grn = grn.GRN(id = 'Meta')
self.tfs_no_interaction_rec = dict()
# Load or Cast
if load_path is not None:
self.grn.load_dict(dict = json.decode(load_path))
else:
self.__cast(gem_data, prediction_thread, correlation_thread)
# Process to Cast out GRN construction guidance
def __cast(self, gem_data, prediction_thread, correlation_thread):
        # process guidance casting based on available information
if gem_data.interactions is not None:
if gem_data.database_info.interaction_db == 'gtrd':
self.__with_gtrd(gem_data, correlation_thread)
elif gem_data.database_info.interaction_db == 'biogrid':
self.__with_biogrid(gem_data, correlation_thread)
else:
self.__no_interaction(gem_data, correlation_thread)
        self.tfs_no_interaction_rec = list(self.tfs_no_interaction_rec)
# print out amount of TFs not covered by selected interaction database
print(' Predicting interactions for',
len(self.tfs_no_interaction_rec),
'TFs not covered in interaction DB')
# Start GRNBoost2-like process if thread is set
if prediction_thread is not None and len(self.tfs_no_interaction_rec)>0:
gBoost = grp.Predict(gem_data, self.grn.grps, prediction_thread)
self.grn = gBoost.expand_meta_grn(
self.grn,
self.tfs_no_interaction_rec,
correlation_thread
)
print(' Total amount of GRPs in Meta GRN:', len(self.grn.grps))
print(' Total amount of Genes in Meta GRN:', len(self.grn.genes))
# else: raise lib.Error('Sorry, such mode is not supported yet!')
""" ToDo: if more than 1 guide can be casted, make agreement """
return
# Make GRN casting guide with GTRD data
def __with_gtrd(self, data, correlation_thread):
# Iterate source TF candidates for GRP
for source in data.genes:
            # Go through tf_list filter if available
if data.tf_list is not None and source not in data.tf_list:
continue
# Get Uniprot ID to use GTRD
uniprot_ids = []
try:
for id in data.interactions.idmap[source].split(';'):
if id in data.interactions.data:
uniprot_ids.append(id)
except:
                warnings.warn(source + ' not in Uniprot ID Map.')
# pass this TF if no recorded interactions in GTRD
if len(uniprot_ids) == 0:
self.tfs_no_interaction_rec[source] = None
continue
# get potential regulatory targets
reg_target = {}
for id in uniprot_ids:
reg_target.update(data.interactions.data[id])
# Handle source TFs with no record in target database
if len(reg_target) == 0:
if source not in self.tfs_no_interaction_rec:
self.tfs_no_interaction_rec[source] = None
continue
else:
                    raise lib.Error('Duplicate source TF when __with_gtrd')
# Iterate target gene candidates for GRP
for target in data.genes:
# Handle source TFs with record in target database
if target in reg_target:
self.grn.update_grn(
source = source,
target = target,
gem1 = data.class1,
gem2 = data.class2,
correlation_thread = correlation_thread
)
# update Gene informations
for gene in self.grn.genes:
if data.database_info.factor_name_type == 'gene_name':
self.grn.genes[gene].add_name(gene)
elif data.database_info.factor_name_type == 'ens_id':
self.grn.genes[gene].add_ens_id(gene)
if data.tf_list is not None and gene in data.tf_list:
self.grn.genes[gene].type = 'TF'
if gene in data.interactions.idmap:
for id in data.interactions.idmap[gene].split(';'):
self.grn.genes[gene].add_uniprot_id(id)
return
# Make GRN casting guide with bioGRID data
def __with_biogrid(self, data, correlation_thread):
# Iterate source TF candidates for GRP
for source in data.genes:
            # Go through tf_list filter if available
if data.tf_list is not None and source not in data.tf_list:
continue
reg_target = {}
if source in data.interactions.data:
reg_target = {i:None for i in data.interactions.data[source]}
elif source in data.interactions.alias:
alias_list = data.interactions.alias[source]
for ele in alias_list:
temp = {tar:None for tar in data.interactions.data[ele]}
reg_target.update(temp)
else:
self.tfs_no_interaction_rec[source] = None
continue
# Handle source TFs with no record in target database
if len(reg_target) == 0:
if source not in self.tfs_no_interaction_rec:
self.tfs_no_interaction_rec[source] = None
continue
else:
                    raise lib.Error('Duplicate source TF when __with_biogrid')
for target in data.genes:
passing = False
# Handle source TFs with record in target database
if target in reg_target:
passing = True
elif target in data.interactions.alias:
for ele in data.interactions.alias[target]:
if ele in reg_target:
passing = True
if passing:
self.grn.update_grn(
source = source,
target = target,
gem1 = data.class1,
gem2 = data.class2,
correlation_thread = correlation_thread
)
return
    # Kinda like GTRD version but with only the correlation_thread filter
    # (no interaction database is consulted)
def __no_interaction(self, data, correlation_thread):
# Iterate source TF candidates for GRP
for source in data.genes:
            # Go through tf_list filter if available
if data.tf_list is not None and source not in data.tf_list:
continue
for target in data.genes:
self.grn.update_grn(
source = source,
target = target,
gem1 = data.class1,
gem2 = data.class2,
correlation_thread = correlation_thread
)
return | Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/lib/meta_grn_caster.py | meta_grn_caster.py |
import re
import torch
import pickle
import difflib
import numpy as np
import pandas as pd
from warnings import warn
import ageas.lib as lib
import ageas.classifier as clf
import ageas.classifier.gnb as gnb
import ageas.classifier.rfc as rfc
import ageas.classifier.xgb as xgb
import ageas.classifier.svm as svm
import ageas.classifier.rnn as rnn
import ageas.classifier.gru as gru
import ageas.classifier.lstm as lstm
import ageas.classifier.logit as logit
import ageas.classifier.transformer as transformer
import ageas.classifier.cnn_1d as cnn_1d
import ageas.classifier.cnn_hybrid as cnn_hybrid
import ageas.database_setup.binary_class as binary_class
class Train(clf.Make_Template):
"""
Train out well performing classification models
"""
def __init__(self,
psGRNs = None,
cpu_mode = False,
database_info = None,
model_config = None,
random_state = None,
test_split_set = False,
):
super(Train, self).__init__()
# Initialization
self.grns = psGRNs
self.models = None
self.allData = None
self.allLabel = None
self.cpu_mode = cpu_mode
self.random_state = random_state
self.model_config = model_config
self.database_info = database_info
self.test_split_set = test_split_set
def general_process(self,
train_size = 0.3,
clf_keep_ratio = None,
clf_accuracy_thread = None
):
"""
Generate training data and testing data iteratively
Then train out models in model sets
        Only keep top performing models in each set
"""
data = binary_class.Process(
self.database_info,
self.grns,
train_size,
self.random_state,
self.allData,
self.allLabel
)
data.auto_inject_fake_grps()
# Update allGRP_IDs, allData, allLabel after first iteration
# to try to avoid redundant calculation
if self.allData is None and self.allLabel is None:
print('Total GRP amount: ', len(data.all_grp_ids))
self.allData = data.dataTrain + data.dataTest
self.allLabel = np.concatenate((data.labelTrain, data.labelTest))
assert len(self.allData) == len(self.allLabel)
self.allData = pd.DataFrame(self.allData, columns= data.all_grp_ids)
# Do trainings
self.models = self.__initialize_classifiers(self.model_config)
for modelSet in self.models:
modelSet.train(data, self.test_split_set)
if self.test_split_set:
modelSet._filter_models(clf_keep_ratio, clf_accuracy_thread)
        # Concat models together based on performance
temp = []
for models in self.models:
for model in models.models:
temp.append(model)
self.models = temp
        # Keep best performing models in local test
if self.test_split_set and clf_keep_ratio is not None:
self._filter_models(clf_keep_ratio, clf_accuracy_thread)
        # Filter based on global test performance
self.models = self.get_clf_accuracy(
clf_list = self.models,
data = self.allData.to_numpy(),
label = self.allLabel
)
self._filter_models(clf_keep_ratio, clf_accuracy_thread)
print('Keeping ', len(self.models), ' models')
def successive_pruning(self,
iteration = 2,
clf_keep_ratio = 0.5,
clf_accuracy_thread = 0.9,
last_train_size = 0.9,
):
"""
Train out models in Successive Halving manner
        Amount of training data is set as the limited resource
While accuracy is set as evaluation standard
"""
assert last_train_size < 1.0
if self.test_split_set:
warn('Trainer Warning: test_split_set is True! Changing to False.')
self.test_split_set = False
# set iteration to 0 if not doing model selection
if iteration is None: iteration = 0
# initialize training data set
init_train_size = float(1 / pow(2, iteration))
train_size = 0
for i in range(iteration):
breaking = False
train_size = float(init_train_size * pow(2, i))
            # at the last round, we set train size to the max resource
if train_size >= last_train_size:
train_size = last_train_size
breaking = True
print('Iteration:', i, ' with training size:', train_size)
keep_ratio = max(1 - train_size, clf_keep_ratio)
            # prune a larger portion as more resources become available
self.general_process(
train_size = train_size,
clf_accuracy_thread = clf_accuracy_thread,
clf_keep_ratio = keep_ratio
)
self.__prune_model_config(id_keep={x[0].id:''for x in self.models})
if breaking: break
if train_size < last_train_size:
print('Iteration Last: with training size:', last_train_size)
self.general_process(
train_size = last_train_size,
clf_accuracy_thread = clf_accuracy_thread
)
else: self._filter_models(clf_accuracy_thread = clf_accuracy_thread)
self.__prune_model_config(id_keep = {x[0].id:'' for x in self.models})
total_model = 0
for genra in self.model_config:
total_model += len(self.model_config[genra])
print('Selecting ', total_model, ' Models after Model Selection')
# Re-assign accuracy based on all data performance
def get_clf_accuracy(self, clf_list, data, label):
i = 0
for record in clf_list:
model = record[0]
i+=1
            # Handle SVM and XGB cases
# Or maybe any sklearn-style case
if (model.model_type == 'SVM' or
model.model_type == 'Logit' or
model.model_type == 'GNB' or
model.model_type == 'RFC' or
model.model_type == 'XGB_GBM'):
pred_result = model.clf.predict(data)
pred_accuracy = difflib.SequenceMatcher(
None,
pred_result,
label
).ratio()
# RNN type handling + CNN cases
elif (model.model_type == 'RNN' or
model.model_type == 'LSTM' or
model.model_type == 'GRU' or
model.model_type == 'Transformer' or
re.search(r'CNN', model.model_type)):
# Enter eval mode and turn off gradient calculation
model.eval()
with torch.no_grad():
pred_result = model(clf.reshape_tensor(data))
pred_accuracy, pred_result = self.__evaluate_torch(
pred_result,
label
)
else:
raise lib.Error('Cannot handle classifier: ', model.model_type)
record[-1] = pred_result
record.append(pred_accuracy)
# For debug purpose
# print('Performined all data test on model', i,
# ' type:', model.model_type, '\n',
# 'test set accuracy:', round(accuracy, 2),
# ' all data accuracy: ', round(pred_accuracy, 2), '\n')
clf_list.sort(key = lambda x:x[-1], reverse = True)
return clf_list
# clear stored data
def clear_data(self):
self.grns = None
self.models = None
self.allData = None
self.allLabel = None
# Save result models in given path
def save_models(self, path):
with open(path, 'wb') as file:
pickle.dump(self.models, file)
# Make model sets based on given config
def __initialize_classifiers(self, config):
list = []
if 'Logit' in config:
list.append(logit.Make(
config = config['Logit'],
cpu_mode = self.cpu_mode,
)
)
if 'Transformer' in config:
list.append(transformer.Make(
config = config['Transformer'],
cpu_mode = self.cpu_mode,
)
)
if 'RFC' in config:
list.append(rfc.Make(
config = config['RFC'],
cpu_mode = self.cpu_mode,
)
)
if 'GNB' in config:
list.append(gnb.Make(
config = config['GNB'],
cpu_mode = self.cpu_mode,
)
)
if 'GBM' in config:
list.append(xgb.Make(
config = config['GBM'],
cpu_mode = self.cpu_mode,
)
)
if 'SVM' in config:
list.append(svm.Make(
config = config['SVM'],
cpu_mode = self.cpu_mode,
)
)
if 'CNN_1D' in config:
list.append(cnn_1d.Make(
config = config['CNN_1D'],
cpu_mode = self.cpu_mode,
)
)
if 'CNN_Hybrid' in config:
list.append(cnn_hybrid.Make(
config = config['CNN_Hybrid'],
cpu_mode = self.cpu_mode,
grp_amount = len(self.allData.columns),
)
)
if 'RNN' in config:
list.append(rnn.Make(
config = config['RNN'],
cpu_mode = self.cpu_mode,
)
)
if 'LSTM' in config:
list.append(lstm.Make(
config = config['LSTM'],
cpu_mode = self.cpu_mode,
)
)
if 'GRU' in config:
list.append(gru.Make(
config = config['GRU'],
cpu_mode = self.cpu_mode,
)
)
return list
    # delete model configs not on the white list (dict)
def __prune_model_config(self, id_keep):
result = {}
for genra in self.model_config:
temp = {}
for id in self.model_config[genra]:
if id in id_keep:
temp[id] = self.model_config[genra][id]
else:
print(' Pruning:', id)
if len(temp) > 0: result[genra] = temp
self.model_config = result
    # Evaluate pytorch based methods' accuracies
def __evaluate_torch(self, result, label):
modifiedResult = []
correct = 0
for i in range(len(result)):
if result[i][0] > result[i][1]: predict = 0
else: predict = 1
if predict == label[i]: correct += 1
modifiedResult.append(predict)
accuracy = correct / len(label)
return accuracy, modifiedResult | Ageas | /Ageas-0.0.1a6.tar.gz/Ageas-0.0.1a6/ageas/lib/clf_trainer.py | clf_trainer.py |