repo_name
stringlengths 6
61
| path
stringlengths 4
230
| copies
stringlengths 1
3
| size
stringlengths 4
6
| text
stringlengths 1.01k
850k
| license
stringclasses 15
values | hash
int64 -9,220,477,234,079,998,000
9,219,060,020B
| line_mean
float64 11.6
96.6
| line_max
int64 32
939
| alpha_frac
float64 0.26
0.9
| autogenerated
bool 1
class | ratio
float64 1.62
6.1
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
jphacks/KB_02 | source/execute_background.py | 1 | 1932 | #! /usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import subprocess
import threading
import locale
import sys
import os
locale.setlocale (locale.LC_ALL, '')
class ExecuteBackground (threading.Thread): # subclass of threading.Thread
    """
    Run a process in the background.

    Defined as a child class of Thread (override __init__() and run()).
    The call to threading.Thread.__init__(self) is mandatory.
    """
    def __init__ (self, **dic):
        """
        Initialise the object.

        Expected keyword arguments (KeyError if missing):
          id  -- label used to tag this job's output lines
          cmd -- argument list passed to subprocess.Popen
          cwd -- working directory for the child process
        """
        threading.Thread.__init__ (self) # must always be called
        self._id = dic['id']
        self._args = dic['cmd']
        # stderr is merged into stdout so a single pipe carries all output.
        self._subproc_args = { 'stdin' : subprocess.PIPE,
                               'stdout' : subprocess.PIPE,
                               'stderr' : subprocess.STDOUT,
                               'cwd' : dic['cwd'],
                               'close_fds' : True, }
    def run (self):
        """
        Work performed inside the thread: spawn the process, echo its
        combined stdout/stderr line by line, then report the return code.
        """
        try:
            p = subprocess.Popen (self._args, **self._subproc_args)
        except OSError as e:
            print ('Failed to execute command "{0}": [{1}] {2}'.format (self._args[0], e.errno, e.strerror), file=sys.stderr)
            return
        # stdin is captured but never written to (kept for completeness).
        (stdouterr, stdin) = (p.stdout, p.stdin)
        print ('-- output [{0}] begin --'.format (self._id))
        if sys.version_info.major == 3:
            # Python 3: pipe yields bytes; decode each line as UTF-8.
            while True:
                line = str (stdouterr.readline (), encoding='utf-8')
                #line = stdouterr.readline ().decode ('utf-8') # when using decode()
                if not line:
                    break
                print (line.rstrip ())
        else:
            # Python 2: pipe already yields str; no decoding needed.
            while True:
                line = stdouterr.readline ()
                if not line:
                    break
                print (line.rstrip ())
        print ('-- output [{0}] end --'.format (self._id))
        # Reap the child to avoid a zombie and obtain its exit status.
        ret = p.wait ()
        print ('[{0}] Return code: {1}'.format (self._id, ret))
| mit | -4,282,219,628,612,054,000 | 28.627119 | 119 | 0.538902 | false | 2.908486 | false | false | false |
harisibrahimkv/django | django/conf/locale/sr_Latn/formats.py | 130 | 1944 | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Display formats (Django date-format syntax, per the header comment above).
DATE_FORMAT = 'j. F Y.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1  # Monday (per Django's week-day numbering)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y.', '%d.%m.%y.',      # '25.10.2006.', '25.10.06.'
    '%d. %m. %Y.', '%d. %m. %y.',  # '25. 10. 2006.', '25. 10. 06.'
    # '%d. %b %y.', '%d. %B %y.',  # '25. Oct 06.', '25. October 06.'
    # '%d. %b \'%y.', '%d. %B \'%y.',  # '25. Oct '06.', '25. October '06.'
    # '%d. %b %Y.', '%d. %B %Y.',  # '25. Oct 2006.', '25. October 2006.'
]
# Accepted datetime inputs: most-specific first, with and without spaces
# after the dots, for both four- and two-digit years.
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y. %H:%M:%S',     # '25.10.2006. 14:30:59'
    '%d.%m.%Y. %H:%M:%S.%f',  # '25.10.2006. 14:30:59.000200'
    '%d.%m.%Y. %H:%M',        # '25.10.2006. 14:30'
    '%d.%m.%Y.',              # '25.10.2006.'
    '%d.%m.%y. %H:%M:%S',     # '25.10.06. 14:30:59'
    '%d.%m.%y. %H:%M:%S.%f',  # '25.10.06. 14:30:59.000200'
    '%d.%m.%y. %H:%M',        # '25.10.06. 14:30'
    '%d.%m.%y.',              # '25.10.06.'
    '%d. %m. %Y. %H:%M:%S',   # '25. 10. 2006. 14:30:59'
    '%d. %m. %Y. %H:%M:%S.%f',  # '25. 10. 2006. 14:30:59.000200'
    '%d. %m. %Y. %H:%M',      # '25. 10. 2006. 14:30'
    '%d. %m. %Y.',            # '25. 10. 2006.'
    '%d. %m. %y. %H:%M:%S',   # '25. 10. 06. 14:30:59'
    '%d. %m. %y. %H:%M:%S.%f',  # '25. 10. 06. 14:30:59.000200'
    '%d. %m. %y. %H:%M',      # '25. 10. 06. 14:30'
    '%d. %m. %y.',            # '25. 10. 06.'
]
# Number formatting: decimal comma, dot-separated thousands in groups of 3.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause | -7,221,901,341,107,972,000 | 44.209302 | 77 | 0.459362 | false | 2.314286 | false | true | false |
juve/corral | tools/package_mapper/PackageMapper.py | 1 | 1641 | import string,cgi
from datetime import datetime
from os import curdir, sep
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from SocketServer import ThreadingMixIn
# Mirrors that serve glidein binary packages; CORRAL_URL is listed first
# in the mappings returned by PackageMapperHandler.map().
CONDOR_URL='http://www.cs.wisc.edu/condor/glidein/binaries/'
CORRAL_URL='http://www-rcf.usc.edu/~juve/glidein/'
class PackageMapper(ThreadingMixIn, HTTPServer):
    """HTTP server that handles each request in its own thread.

    BUG FIX: ThreadingMixIn must come FIRST in the base list.  With the
    original order (HTTPServer, ThreadingMixIn) the MRO resolved
    process_request to BaseServer's serial implementation, so the mixin
    never took effect and requests were handled one at a time.
    """
    pass
class PackageMapperHandler(BaseHTTPRequestHandler):
    """Maps a requested glidein package name to a list of download URLs.

    NOTE(review): Python 2 code (print statements); runs under the
    threading PackageMapper server above.
    """
    def log_message(self, format, *args):
        # Suppress the default per-request access log; do_GET logs instead.
        pass
    def do_GET(self):
        # Get requested package name (path minus the leading '/')
        package = self.path[1:]
        # Log the request
        date = datetime.utcnow().isoformat()+'Z'
        print date,self.client_address[0],package
        # Perform the mapping
        urls = self.map(package)
        # Send URLs back to client, one per line
        self.send_response(200)
        self.send_header('Content-type','text/plain')
        self.end_headers()
        for url in urls:
            self.wfile.write(url+'\n')
    def map(self, package):
        """Return candidate download URLs for *package*.

        Requests that are not .tar.gz files, or whose base name does not
        split into five dash-separated components, get an empty list.
        """
        mappings = []
        # Process package
        if package.endswith('.tar.gz'):
            comp = package[:-7].split('-')
            if len(comp) == 5:
                # Parse the package components
                # (assumed layout: version-arch-os-osver-glibcNN -- TODO confirm)
                condor_version = comp[0]
                arch = comp[1]
                opsys = comp[2]
                opsys_version = comp[3]
                glibc_version = comp[4][5:]
                print condor_version,arch,opsys,opsys_version,glibc_version
                # Add default mappings (CORRAL mirror first, then CONDOR)
                mappings.append(CORRAL_URL+package)
                mappings.append(CONDOR_URL+package)
        return mappings
def main():
    """Serve the package mapper on port 10960 until interrupted."""
    try:
        server = PackageMapper(('',10960),PackageMapperHandler)
        print 'Started Package Mapper on port 10960...'
        server.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C: close the listening socket before exiting.
        print '^C received, shutting down server'
        server.socket.close()

if __name__ == '__main__':
    main()
| apache-2.0 | -7,160,277,234,369,766,000 | 23.132353 | 63 | 0.706886 | false | 3.149712 | false | false | false |
lukauskas/mMass-fork | gui/dlg_error.py | 2 | 4327 | # -------------------------------------------------------------------------
# Copyright (C) 2005-2013 Martin Strohalm <www.mmass.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# Complete text of GNU GPL can be found in the file LICENSE.TXT in the
# main directory of the program.
# -------------------------------------------------------------------------
# load libs
import sys
import platform
import numpy
import wx
# load modules
import mwx
import config
import images
# SYSTEM ERROR
# ------------
class dlgError(wx.Dialog):
    """Show exception message.

    Modal-style error dialog: displays the traceback plus a summary of the
    runtime environment in an editable text box (so the user can append
    comments before mailing the report), with Quit / Continue buttons.
    """

    def __init__(self, parent, exception=''):
        wx.Dialog.__init__(self, parent, -1, 'Application Error', style=wx.DEFAULT_DIALOG_STYLE|wx.STAY_ON_TOP)

        # get system information (appended below the passed-in traceback)
        self.exception = ''
        self.exception += exception
        self.exception += '\n-------------------------'
        self.exception += '\nmMass: %s' % (config.version)
        self.exception += '\nPython: %s' % str(platform.python_version_tuple())
        self.exception += '\nwxPython: %s' % str(wx.version())
        self.exception += '\nNumPy: %s' % str(numpy.version.version)
        self.exception += '\n-------------------------'
        self.exception += '\nArchitecture: %s' % str(platform.architecture())
        self.exception += '\nMachine: %s' % str(platform.machine())
        self.exception += '\nPlatform: %s' % str(platform.platform())
        self.exception += '\nProcessor: %s' % str(platform.processor())
        self.exception += '\nSystem: %s' % str(platform.system())
        self.exception += '\nMac: %s' % str(platform.mac_ver())
        self.exception += '\nMSW: %s' % str(platform.win32_ver())
        self.exception += '\nLinux: %s' % str(platform.dist())
        self.exception += '\n-------------------------\n'
        self.exception += 'Add your comments:\n'

        # make GUI
        sizer = self.makeGUI()

        # fit layout
        self.Layout()
        sizer.Fit(self)
        self.SetSizer(sizer)
        self.SetMinSize(self.GetSize())
        self.Centre()
    # ----

    def makeGUI(self):
        """Make GUI elements."""

        # make elements
        self.exception_value = wx.TextCtrl(self, -1, self.exception, size=(400,250), style=wx.TE_MULTILINE)
        self.exception_value.SetFont(wx.SMALL_FONT)
        message_label = wx.StaticText(self, -1, "Uups, another one...\nUnfortunately, you have probably found another bug in mMass.\nPlease send me this error report to [email protected] and I will try to fix it.\nI apologize for any inconvenience due to this bug.\nI strongly recommend to restart mMass now.")
        message_label.SetFont(wx.SMALL_FONT)
        icon = wx.StaticBitmap(self, -1, images.lib['iconError'])
        quit_butt = wx.Button(self, -1, "Quit mMass")
        quit_butt.Bind(wx.EVT_BUTTON, self.onQuit)
        cancel_butt = wx.Button(self, wx.ID_CANCEL, "Try to Continue")

        # pack elements
        messageSizer = wx.BoxSizer(wx.HORIZONTAL)
        messageSizer.Add(icon, 0, wx.RIGHT, 10)
        messageSizer.Add(message_label, 0, wx.ALIGN_LEFT)
        buttSizer = wx.BoxSizer(wx.HORIZONTAL)
        buttSizer.Add(quit_butt, 0, wx.RIGHT, 15)
        buttSizer.Add(cancel_butt, 0)
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        mainSizer.Add(self.exception_value, 0, wx.EXPAND|wx.CENTER|wx.ALL, mwx.PANEL_SPACE_MAIN)
        mainSizer.Add(messageSizer, 0, wx.ALIGN_LEFT|wx.LEFT|wx.RIGHT|wx.BOTTOM, mwx.PANEL_SPACE_MAIN)
        mainSizer.Add(buttSizer, 0, wx.ALIGN_RIGHT|wx.LEFT|wx.RIGHT|wx.BOTTOM, mwx.PANEL_SPACE_MAIN)
        return mainSizer
    # ----

    def onQuit(self, evt):
        """Quit application."""
        sys.exit()
    # ----
| gpl-3.0 | -6,518,802,465,695,283,000 | 38.336364 | 310 | 0.583776 | false | 3.792287 | false | false | false |
twoolie/ProjectNarwhal | fabfile.py | 1 | 3536 | from fabric.api import *
from fabric.contrib import files
import platform
from os.path import join
from argparse import ArgumentError
# Fabric deployment defaults; override per-invocation with `fab -H host ...`.
env.hosts = ['your_username@yourhost:yourport']  # placeholder target host(s)
env.web_path = '/var/www/django'  # remote root for virtualhost checkouts
env.log_root ='/var/log/apache'  # remote apache log directory
env.project = 'example_project'  # Django project directory name
@task
def dev_serve():
    """Run the compass watcher and the Django dev server inside the venv."""
    activate = 'Scripts\\activate.bat' if platform.system() == "Windows" else 'source bin/activate'
    with prefix(activate):
        local(join('compass watch %(project)s', 'static &') % env)
        local(join('python %(project)s', 'manage.py runserver') % env)
@task
def bootstap_dev():
    """ Bootstrap your local development environment """
    local('git clone https://github.com/twoolie/ProjectNarwhal.git')
    with lcd('ProjectNarwhal'):
        local('virtualenv --distribute .')
        activate = 'Scripts\\activate.bat' if platform.system() == "Windows" else 'source bin/activate'
        with prefix(activate):
            local('pip install -r requirements.txt')
            local(join('python %(project)s', 'manage.py syncdb --all') % env)
            local(join('python %(project)s', 'manage.py migrate --fake') % env)
@task
def bootstrap(hostname, path=env.web_path, **kwargs):
    """ Creates a virtualhost instance on the box you specify
    `fab -H server1,server2 bootstrap:narwhal.example.com[,deploy_options...]`

    Extra keyword arguments are forwarded verbatim to deploy()
    (ref, apache_conf_path, distro, ...).
    """
    run('mkdir -p %(path)s/%(hostname)s/' % locals())
    with cd('%(path)s/%(hostname)s/' % locals()):
        run('git init .')
    # BUG FIX: the original did `locals().update(kwargs); deploy(**locals())`.
    # Mutating the locals() snapshot is undefined behaviour in CPython, and
    # **locals() also forwarded the bound name `kwargs` itself, which
    # deploy() rejects with a TypeError.  Forward arguments explicitly.
    deploy(hostname, path=path, **kwargs)  # deploy script takes care of the rest
@task
def deploy(hostname, ref='master', path=env.web_path, apache_conf_path=None, distro=None, \
        log_root=env.log_root, thread_count=2, process_count=4):
    """ `fab -H server1,server2 deploy:narwhal.example.com` """
    # NOTE: locals() is used both for %-formatting and as the apache.conf
    # template context below, so these parameter names must match the
    # placeholders in apache.conf -- do not rename them.
    if not apache_conf_path: apache_conf_path=find_apache_path(distro)
    # Push the requested ref into the repo created by bootstrap().
    # NOTE(review): %(host_string)s is not a local variable here, so this
    # formatting likely raises KeyError; presumably env.host_string was
    # intended -- TODO confirm.
    local('git push -f ssh://%(host_string)s/%(path)s/%(hostname)s/ %(ref)s' % locals())
    with cd('%(path)s/%(hostname)s' % locals()):
        run('git checkout -f %(ref)s' % locals())
        run('pip install -r requirements.txt')
        with cd(env.project):
            # Render apache.conf with this function's locals as context.
            files.upload_template('apache.conf', apache_conf_path+hostname,
                context=locals(), mode=0755, use_sudo=True)
            run('./manage.py collectstatic --noinput')
            run('./manage.py syncdb')
            run('./manage.py migrate')
            run('touch serve.wsgi') # restart the wsgi process
@task
def deploy_epio(appname):
    """ fab deploy_epio:appname

    Deploys the project to ep.io: collects static files, links the
    narwhal package into the project dir, regenerates the solr schema,
    uploads, and runs the remote migrations.
    """
    activate = 'Scripts\\activate.bat' if platform.system() == "Windows" else 'source bin/activate'
    with lcd(env.project), prefix(activate):
        local('python manage.py collectstatic --noinput')
        # BUG FIX: the Windows branch used '%s(target)' which is not a valid
        # mapping placeholder ('%(target)s'), so the dict repr was rendered
        # into the shell command.  mklink takes link then target; ln -s
        # takes target then link.
        local(('mklink /D %(link)s %(target)s' if platform.system() == "Windows" else 'ln -s %(target)s %(link)s')
              % {'link': 'narwhal', 'target': '../narwhal'})
        local('python manage.py build_solr_schema > solr_schema.xml')
        local('epio upload -a %(appname)s' % locals())
        local('epio django syncdb -a %(appname)s' % locals())
        local('epio django migrate -a %(appname)s' % locals())
        local('epio django rebuild_index')
#-------- Utils ----------
def _join(*args):
return "/".join(args)
def find_apache_path(distro):
    """Return the apache vhost config directory for *distro*.

    When *distro* is falsy it is detected on the remote host via fabric's
    run().  Raises argparse.ArgumentError for unrecognised distros.
    """
    if not distro:
        distro = run('python -c "import platform; print platform.dist()[0]"')
    if distro in ('debian', 'ubuntu'):
        return '/etc/apache2/sites-enabled/'
    else:
        # BUG FIX: ArgumentError's signature is (argument, message); the
        # original one-argument call raised TypeError at raise time instead
        # of the intended error.  Pass None for the offending argument.
        raise ArgumentError(None, 'Cannot automatically determine apache_conf_path')
| gpl-3.0 | -6,157,937,750,131,683,000 | 41.095238 | 118 | 0.623303 | false | 3.582573 | false | false | false |
flavour/eden | modules/templates/locations/MM/config.py | 9 | 1031 | # -*- coding: utf-8 -*-
from gluon import current
def config(settings):
    """
    Template settings for Myanmar
    - designed to be used in a Cascade with an application template
    """
    #T = current.T

    # Add the Myanmar locations to the pre-populate list
    settings.base.prepopulate.append("locations/MM")

    # Restrict the GIS to Myanmar only
    settings.gis.countries.append("MM")
    # Hide the Postcode selector in the LocationSelector
    settings.gis.postcode_selector = False

    # Localisation: language, timezone and phone country code
    settings.L10n.languages["my"] = "Burmese"
    # Default Language (put this in custom template if-required)
    #settings.L10n.default_language = "my"
    settings.L10n.timezone = "Asia/Rangoon"
    settings.L10n.default_country_code = 95

    # Finance: register the Kyat and make it the default currency
    settings.fin.currencies["MMK"] = "Myanmar Kyat"
    settings.fin.currency_default = "MMK"
# END =========================================================================
| mit | 7,511,900,009,144,343,000 | 30.242424 | 79 | 0.639185 | false | 3.950192 | false | false | false |
roscopecoltran/scraper | .staging/meta-engines/xlinkBook/update/update_videolectures.py | 1 | 9204 | #!/usr/bin/env python
from spider import *
import re
sys.path.append("..")
from record import Record
class VideolecturesSpider(Spider):
    """Scrape videolectures.net (events, lectures per subject, authors) into
    flat-file DBs using the Spider base-class helpers (get_file_name /
    open_db / write_db / do_upgrade_db / cancel_upgrade).

    NOTE(review): Python 2 code (print statements, dict.has_key); depends
    on requests and BeautifulSoup being provided by the spider module.
    """
    def __init__(self):
        Spider.__init__(self)
        self.school = 'videolectures'
        # Lecture type -> site type code used in the AJAX drilldown URLs
        # ('__' appears to be a placeholder for 'Other' -- TODO confirm).
        self.type_map = {'Lecture ' : 'vl',\
                         'Tutorial' : 'vtt',\
                         'Keynote' : 'vkn',\
                         'Interview' : 'viv',\
                         'Other' : '__'}
        # Subject name -> site category id (cid) URL parameter.
        self.subject_cid_map = {'Machine Learning' : '16',\
                                'Data Mining' : '36',\
                                'Computer Vision' : '71',\
                                'Network Analysis' : '28',\
                                'Data Visualisation' : '41',\
                                'Natural Language Processing' : '144',\
                                'Pattern Recognition' : '395',\
                                'Text Mining' : '37',\
                                'Web Mining' : '127',\
                                'Robotics' : '69',\
                                'Artificial Intelligence' : '136',\
                                'Big Data' : '602',\
                                'Semantic Web' : '27',\
                                'Web Search' : '163',\
                                'Optimization Methods' : '232'}
    def findLastPage(self, soup):
        """Return the last pagination page number linked from *soup*.

        The count is parsed out of the ' Last ' link's href (the number in
        parentheses); defaults to 1 when no such link exists.
        """
        max_page = 1
        for a in soup.find_all('a'):
            if a.text == ' Last ':
                max_page = int(a['href'][a['href'].find('(') + 1 : a['href'].find(')')])
                break
        return max_page
    def processEventData(self, subject):
        """Crawl the event drilldown listing into the *subject* DB file.

        Records are id'd "<subject>-<n>"; the .tmp file only replaces the
        existing DB when the record count changed.
        """
        r = requests.get('http://videolectures.net/site/ajax/drilldown/?t=evt&cid=13&w=5')
        soup = BeautifulSoup(r.text)
        max_page = self.findLastPage(soup)
        file_name = self.get_file_name('eecs/' + self.school + '/' + subject, self.school)
        file_lines = self.countFileLineNum(file_name)
        f = self.open_db(file_name + ".tmp")
        self.count = 0
        urls_list = []
        for page in range(1, max_page + 1):
            r = requests.get('http://videolectures.net/site/ajax/drilldown/?o=top&t=evt&p=' + str(page) + '&cid=13&w=5')
            soup = BeautifulSoup(r.text)
            # Collect the event URLs first; the spans below are then
            # consumed in groups of three (title + two description parts).
            for a in soup.find_all('a'):
                if a.attrs.has_key('lang'):
                    urls_list.append('http://videolectures.net' + a['href'])
            i = 0
            title = ''
            desc = ''
            for span in soup.find_all('span'):
                i += 1
                if i == 1:
                    print title
                    title = span.text.strip()
                if i == 2:
                    desc = 'description:' + span.text.strip() + ' '
                if i == 3:
                    # Third span completes one record; reset the counter.
                    desc += span.text.strip()
                    self.count += 1
                    self.write_db(f, subject + '-' + str(self.count), title, urls_list[self.count - 1], desc)
                    i = 0
        self.close_db(f)
        # Promote the .tmp file only when the line count actually changed.
        if file_lines != self.count and self.count > 0:
            self.do_upgrade_db(file_name)
            print "before lines: " + str(file_lines) + " after update: " + str(self.count) + " \n\n"
        else:
            self.cancel_upgrade(file_name)
            print "no need upgrade\n"
    def processData(self, subject):
        """Crawl every lecture type for *subject* into its DB file."""
        file_name = self.get_file_name('eecs/' + self.school + '/' + subject, self.school)
        file_lines = self.countFileLineNum(file_name)
        f = self.open_db(file_name + ".tmp")
        self.count = 0
        print 'processing ' + subject
        for s in self.type_map.keys():
            r = requests.get('http://videolectures.net/site/ajax/drilldown/?t=' + self.type_map.get(s) + '&cid=' + self.subject_cid_map.get(subject) + '&w=5')
            soup = BeautifulSoup(r.text)
            max_page = self.findLastPage(soup)
            for page in range(1, max_page + 1):
                r = requests.get('http://videolectures.net/site/ajax/drilldown/?o=top&t=' + self.type_map.get(s) + '&p=' + str(page) + '&cid=' + self.subject_cid_map.get(subject) + '&w=5')
                soup = BeautifulSoup(r.text)
                for div in soup.find_all('div', class_='lec_thumb'):
                    instructors = ''
                    title = div.a.span.span.text.strip()
                    url = 'http://videolectures.net' + div.a['href']
                    # Re-parse just this thumbnail block to find the author.
                    soup1 = BeautifulSoup(div.prettify())
                    div = soup1.find('div', class_='author')
                    if div != None and div.span != None:
                        instructors = 'instructors:' + div.span.text.strip()
                    self.count += 1
                    vl_num = 'vl-' + str(self.subject_cid_map.get(subject)) + '-' + str(self.count)
                    print vl_num + ' ' + title
                    self.write_db(f, vl_num, title, url, instructors)
        self.close_db(f)
        if file_lines != self.count and self.count > 0:
            self.do_upgrade_db(file_name)
            print "before lines: " + str(file_lines) + " after update: " + str(self.count) + " \n\n"
        else:
            self.cancel_upgrade(file_name)
            print "no need upgrade\n"
    def upFirstChar(self, text):
        """Upper-case the first character of each space-separated word."""
        result = ''
        for i in range(0, len(text)):
            if (i > 0 and text[i - 1] == ' ') or i == 0:
                result += str(text[i]).upper()
            else:
                result += text[i]
        return result.strip()
    def getNameAndDescription(self, url):
        """Fetch an author page; return (name, 'homepage:... description:...').

        Either element may be empty when the page lacks the expected spans.
        """
        name = ''
        homepage = ''
        desc = ''
        r = requests.get(url)
        soup = BeautifulSoup(r.text)
        span_name = soup.find('span', class_='auth_name')
        span_desc = soup.find("span", id="auth_desc_edit")
        if span_name != None and span_name.a != None:
            name = span_name.a.text.replace(' ',' ').strip()
            homepage = span_name.a['href']
            desc += 'homepage:' + homepage + ' '
        if span_desc != None:
            desc += 'description:' + span_desc.text.replace('\n', ' ').strip()
        return name, desc
    def processUserData(self):
        """Crawl the author listing pages into the shared 'user' DB file.

        Records are keyed "vl-<videocount>-<n>" and emitted by descending
        video count; full details are fetched only for the top 100 authors.
        """
        print 'processing user data'
        file_name = self.get_file_name('eecs/' + self.school + '/user', self.school)
        file_lines = self.countFileLineNum(file_name)
        f = self.open_db(file_name + ".tmp")
        self.count = 0
        user_dict= {}
        for page in range(1, 24):
            r = requests.get('http://videolectures.net/site/list/authors/?page=' + str(page))
            soup = BeautifulSoup(r.text)
            for tr in soup.find_all('tr'):
                # Skip the header row (the one containing 'Author').
                if tr.text.find('Author') == -1:
                    soup1 = BeautifulSoup(tr.prettify())
                    video_pos = tr.text.find('video')
                    views_pos = tr.text.find('views')
                    url = 'http://videolectures.net' + soup1.find('a')['href']
                    desc = ''
                    vl_id = ''
                    title = self.upFirstChar(soup1.find('a')['href'][1:].replace('/','').replace('_', ' '))
                    self.count += 1
                    # The id prefix is the author's video count, sliced from
                    # the row text between 'video(s)' and 'views'.
                    if tr.text.find('videos') != -1:
                        vl_id = str(tr.text[video_pos + 6 : views_pos].strip()) + '-' + str(self.count)
                    else:
                        vl_id = str(tr.text[video_pos + 5 : views_pos].strip()) + '-' + str(self.count)
                    desc = 'organization:' + tr.text[views_pos + 5 :]
                    if views_pos == -1:
                        # No view count present: fall back to a zero prefix.
                        vl_id = '0' + '-' + str(self.count)
                        desc = 'organization:' + tr.text[video_pos + 5 :]
                    print vl_id + ' ' + title
                    user_dict[vl_id] = Record(self.get_storage_format(vl_id, title, url, desc))
        self.count = 0
        # Emit authors ordered by the numeric video-count prefix of the id.
        for item in sorted(user_dict.items(), key=lambda user_dict:int(user_dict[1].get_id()[0 : user_dict[1].get_id().find('-')].strip()), reverse=True):
            self.count += 1
            name = ''
            desc = ''
            if self.count <= 100 and item[1].get_url().strip().startswith('http'):
                name, desc = self.getNameAndDescription(item[1].get_url().strip())
            uid = 'vl-' + item[1].get_id()[0 : item[1].get_id().find('-')] + '-' + str(self.count)
            if name == '':
                name = item[1].get_title().strip()
            #print uid + ' ' + name
            self.write_db(f, uid, name, item[1].get_url().strip(), item[1].get_describe().strip() + ' ' + desc)
        self.close_db(f)
        if file_lines != self.count and self.count > 0:
            self.do_upgrade_db(file_name)
            print "before lines: " + str(file_lines) + " after update: " + str(self.count) + " \n\n"
        else:
            self.cancel_upgrade(file_name)
            print "no need upgrade\n"
    def doWork(self):
        """Entry point: crawl events, every mapped subject, then authors."""
        self.processEventData('event')
        for subject in self.subject_cid_map.keys():
            self.processData(subject)
        self.processUserData()
# Script entry: run a full crawl when this module is executed.
start = VideolecturesSpider()
start.doWork()
| mit | -8,895,108,662,119,555,000 | 42.415094 | 188 | 0.464146 | false | 3.782984 | false | false | false |
orbnauticus/Pique | pique/common.py | 1 | 3022 | #!/usr/bin/env python
#
# Copyright (c) 2010, Ryan Marquardt
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the project nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import collections
import os.path
import re
import sys
import threading
DEBUG = True
debug_out_lock = threading.Lock()
def debug(*args):
if DEBUG:
with debug_out_lock:
print >>sys.stderr, threading.currentThread().name + ':', ' '.join(map(str,args))
VERBOSE = True
def verbose(*args):
if VERBOSE:
print >>sys.stderr, ' '.join(map(str,args))
TIME_FORMAT='hms'
SECOND = 1e9
NETPORT = 8145
BUFSIZE = 1<<12 #4096
class Time(long):
@classmethod
def FromNS(cls, ns):
return Time(ns)
@classmethod
def FromSec(cls, s):
return Time(s*SECOND)
def __repr__(self):
return self.format('s.')
def __str__(self):
return self.format('hms')
def format(self, f):
if f == 'hms':
m,s = divmod(self/SECOND, 60)
h,m = divmod(m,60)
return '%d:%02d:%02d' % (h,m,s) if h else '%d:%02d' % (m,s)
elif f == 's.':
return '%f' % (self / float(SECOND))
def uri(path):
return path if re.match('[a-zA-Z0-9]+://.*', path) else 'file://' + os.path.abspath(path)
class PObject(object):
def connect(self, which, func, *args, **kwargs):
try:
self.__callbacks
except AttributeError:
self.__callbacks = collections.defaultdict(list)
self.__callbacks[which].append((func,args,kwargs))
def emit(self, signal, *args):
try:
cbs = iter(self.__callbacks[signal])
except AttributeError:
pass
else:
for f,a,k in cbs:
f(*(args+a), **k)
def hasattrs(obj, attrs):
return all(hasattr(obj,a) for a in attrs)
| bsd-3-clause | 1,251,871,808,911,474,200 | 30.154639 | 90 | 0.709795 | false | 3.465596 | false | false | false |
tsifrer/python-twitch-client | tests/api/test_clips.py | 1 | 1364 | import json
import responses
from twitch.client import TwitchClient
from twitch.constants import BASE_URL
from twitch.resources import Clip
example_clip = {
"broadcast_id": "25782478272",
"title": "cold ace",
"tracking_id": "102382269",
"url": "https://clips.twitch.tv/OpenUglySnoodVoteNay?tt_medium=clips_api&tt_content=url",
}
example_clips = {"clips": [example_clip]}
@responses.activate
def test_get_by_slug():
slug = "OpenUglySnoodVoteNay"
responses.add(
responses.GET,
"{}clips/{}".format(BASE_URL, slug),
body=json.dumps(example_clip),
status=200,
content_type="application/json",
)
client = TwitchClient("client id")
clip = client.clips.get_by_slug(slug)
assert isinstance(clip, Clip)
assert clip.broadcast_id == example_clip["broadcast_id"]
@responses.activate
def test_get_top():
params = {"limit": 1, "period": "month"}
responses.add(
responses.GET,
"{}clips/top".format(BASE_URL),
body=json.dumps(example_clips),
status=200,
content_type="application/json",
)
client = TwitchClient("client id")
clips = client.clips.get_top(**params)
assert len(clips) == len(example_clips)
assert isinstance(clips[0], Clip)
assert clips[0].broadcast_id == example_clips["clips"][0]["broadcast_id"]
| mit | -1,391,993,542,125,280,800 | 23.8 | 93 | 0.647361 | false | 3.194379 | false | false | false |
CERT-W/certitude | utils/ioc-selector.py | 2 | 3439 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import re
import xml.etree.ElementTree as ET
selectors = {'ServiceItem': ['descriptiveName', 'mode', 'path', 'pathmd5sum', 'status', 'name'],
'RegistryItem': ['KeyPath', 'ValueName'],
'FileItem': ['FilePath', 'FullPath', 'FileExtension', 'FileName'],
'ArpEntryItem': ['Interface', 'IPv4Address', 'PhysicalAddress', 'CacheType'],
'DnsEntryItem': ['RecordName', 'RecordType', 'TimeToLive', 'DataLength', 'RecordData/Host',
'RecordData/IPv4Address'],
'PortItem': ['protocol', 'localIP', 'localPort', 'remoteIP', 'remotePort', 'state', 'pid'],
'PrefetchItem': ['PrefetchHash', 'ApplicationFileName', 'ReportedSizeInBytes', 'SizeInBytes',
'TimesExecuted', 'FullPath'],
'ProcessItem': ['pid', 'parentpid', 'UserSID', 'Username', 'name', 'path', 'HandleList/Handle/Type', 'HandleList/Handle/Name'],
'MemoryItem': ['pid', 'parentpid', 'name', 'page_addr', 'page_size', 'access_read', 'access_write',
'access_execute', 'access_copy_on_write']
}
def multiprompt(options, all=False):
regex = re.compile('(\d+|\*)' if all else '(\d)')
for counter, opt in enumerate(options):
print '({})\t{}'.format(counter + 1, opt)
if all:
print '(*)\tAll of them'
user_input = raw_input('> ')
if not regex.search(user_input):
print '\n[>] Please enter a valid value.'
return multiprompt(options)
return user_input if user_input == '*' else int(user_input) - 1
def setSelectAttribute(items, choosenItem, choosenSelector):
context = items[choosenItem].find('Context')
document = context.get('document')
context.set('select', '{}/{}'.format(document, selectors[document][choosenSelector]))
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage : python ioc-selector.py [ioc path]'
exit(1)
try :
tree = ET.parse(sys.argv[1])
except IOError:
print 'Your IOC file was not found.'
exit(1)
# Stripping IOC namespaces
for el in tree.getroot().iter():
if '}' in el.tag:
el.tag = el.tag.split('}', 1)[1]
root = tree.getroot()
# Getting all indicator items elements
items = root.findall('.//IndicatorItem')
itemsList = []
for i in items:
itemsList.append('{} {} {}'.format(i.find('Context').get('search'), i.get('condition'), i.find('Content').text))
print '[>] Which indicator item would you like to edit?'
choice = multiprompt(itemsList, True)
print '\n[>] Which attribute would you like to select?'
if choice == '*':
print '[!] All the indicators will get the same \'select\' attribute.'
document = items[0].find('Context').get('document')
selec = multiprompt(selectors[document])
for nb in range(len(items)):
setSelectAttribute(items, nb, selec)
else:
document = items[choice].find('Context').get('document')
selec = multiprompt(selectors[document])
setSelectAttribute(items, choice, selec)
try:
filename = sys.argv[1] + '-select'
tree.write(filename)
print '[>] File successfully saved as ' + filename
except Exception as e:
print '[X] Something happened' + str(e)
| gpl-2.0 | -8,050,126,690,803,244,000 | 35.2 | 140 | 0.587089 | false | 3.783278 | false | false | false |
xiaojieluo/amazingtool | amazingtool/api/handler/CodeHandler.py | 1 | 5667 | from api.handler.APIHandler import APIHandler
import asyncio
import tornado
import hashlib
import tasks
import api.web
import base64
class index(APIHandler):
def get(self):
pass
encrypt_key = 'api.encrypt.{text}'
decrypt_key = 'api.decrypt.{text}'
class encrypt(APIHandler):
'''
加密 api,请求示例
uri/encrypt?type='md5|sha256|sha512'&text=hello
参数:
type : 表示加密类型,当类型有多个时, 使用 | 分割
text : 表示要加密的源数据
'''
# 该 api 支持的加密算法,过滤 type 参数用
TYPE = ('md5', 'sha1','sha224','sha256','sha384','sha512', 'blake2b')
@tornado.web.authenticated
async def get(self, text):
# 不指定 type, 则返回所有类型的加密数据
types = (self.get_argument('type', '|'.join(self.TYPE))).split('|')
result = dict()
for type_ in types:
if type_ in self.TYPE:
tmp = self.cache.exists(encrypt_key.format(text=text))
if tmp and self.cache.hexists(encrypt_key.format(text=text), type_):
cache = self.cache.hget(encrypt_key.format(text=text), type_)
result[type_] = bytes.decode(cache)
else:
print("不存在")
result[type_] = await self.encrypt(type_, text)
await self.update_cache(type_, {'text': text, 'result': result})
else:
result[type_] = 'The encryption algorithm is not supported at this time'
data = dict(query = text,result = result)
self.write_json(data)
async def encrypt(self, type_, text, charset='utf-8'):
'''
抽象的加密函数,利用 python 的反射机制,执行 hashlib 相应的加密函数,并更新加密数据库中的资料
参数:
type_ : 加密类型
text : 需要加密的源数据
'''
if hasattr(hashlib, type_):
result = getattr(hashlib, type_)(text.encode(charset)).hexdigest()
return result
async def update_cache(self, type_, data):
'''
異布更新緩存與數據庫
'''
text = data.get('text', '')
result = data.get('result', '')
self.cache.hmset(encrypt_key.format(text=text), {type_:result[type_]})
self.cache.hmset(decrypt_key.format(text=result[type_]), {type_:text})
tmp = {'text':text, 'result':data['result'][type_]}
await self.update(tmp, type_)
class decrypt(APIHandler):
'''
解密 api,请求示例
uri/decrypt?type='md5|sha256|sha512'&text=hello
参数:
type : 表示密文类型,有多个类型时, 使用 | 分割,当不确定类型时,可以留空
text : 表示要加密的源数据
'''
# 该 api 支持的解密算法,过滤 type 参数用
TYPE = ('md5', 'sha1','sha224','sha256','sha384','sha512', 'blake2b')
async def get(self, text):
types = (self.get_argument('type', '|'.join(self.TYPE))).split('|')
result = dict()
for type_ in types:
if type_ in self.TYPE:
if self.cache.hexists(decrypt_key.format(text=text), type_):
# 命中緩存
cache = self.cache.hget(decrypt_key.format(text=text), type_)
result[type_] = bytes.decode(cache)
else:
result[type_] = await self.decrypt(type_, text)
else:
result[type_] = 'The encryption algorithm is not supported at this time'
data = dict(
query = text,
result = result
)
self.write_json(data)
async def decrypt(self, type_, text, charset='utf-8'):
'''
抽象的解密函数,利用 python 的反射机制,执行 hashlib 相应的加密函数,并更新加密数据库中的资料
参数:
type_ : 加密类型
text : 需要加密的源数据
'''
result = self.find({'result':text}, type_)
if result:
return result
class encode(APIHandler):
'''
加密 api,请求示例
uri/encrypt?type=base64&text=hello
参数:
type : 表示加密类型,当类型有多个时, 使用 | 分割
text : 表示要加密的源数据
'''
# 该 api 支持的加密算法,过滤 type 参数用
TYPE = ('base16','base32', 'base64', 'base85')
async def get(self, text):
types = (self.get_argument('type', '|'.join(self.TYPE))).split('|')
result = dict()
for type_ in types:
if type_ in self.TYPE:
result[type_] = await self.encode(type_, text)
else:
result[type_] = 'The encryption algorithm is no t supported at this time'
data = dict(
query = text,
result = result
)
self.write_json(data)
async def encode(self, type_, text, charset='utf-8'):
'''
抽象的编码函数,利用 python 的反射机制,执行 base64 相应的加密函数,并更新 编码 数据库中的资料
参数:
type_ : 编码类型
text : 需要编码的源数据
'''
# 组合 base 编码名称,转换成 base64 库 需要的格式
types = (type_[0:1]+type_[-2:]+'encode')
if hasattr(base64, types):
result = getattr(base64, types)(text.encode()).decode()
return result
| gpl-2.0 | 4,518,796,194,304,270,000 | 28.625 | 92 | 0.521398 | false | 3.018193 | false | false | false |
eyp-developers/statistics | statistics/views/api/session.py | 1 | 2879 | import json
from decimal import Decimal
from django.http import HttpResponse
from statistics.models import Session, Committee, Point, ContentPoint
def session_api(request, session_id):
    """Live-reload JSON endpoint backing the session-page graphs.

    Returns per-committee point/DR counts plus aggregate statistics for the
    session identified by *session_id*.
    """
    # Since the graphs on the session page need to be able to livereload, we need to create
    # a custom "API" that outputs the necessary JSON to keep the graph alive
    session = Session.objects.get(pk=session_id)
    #First we need all the committees registered for that session
    committees = Committee.objects.filter(session__id=session_id).order_by('name')
    #Then we need all the available points, direct responses and votes
    # Content-based sessions ('C') store their points in ContentPoint instead
    # of Point; both models expose the same fields used below.
    if session.session_statistics != 'C':
        all_points = Point.objects.filter(session_id=session_id).order_by('timestamp')
        points = Point.objects.filter(session_id=session_id).filter(point_type='P')
        drs = Point.objects.filter(session_id=session_id).filter(point_type='DR')
    else:
        all_points = ContentPoint.objects.filter(session_id=session_id).order_by('timestamp')
        points = ContentPoint.objects.filter(session_id=session_id).filter(point_type='P')
        drs = ContentPoint.objects.filter(session_id=session_id).filter(point_type='DR')
    #Then we need a list of each of them.
    committee_list = []
    points_list = []
    drs_list = []
    if not all_points:
        # No points yet: emit empty placeholders.
        # NOTE(review): this branch uses the key 'ppm' while the populated
        # branch below uses 'mpp' -- looks like a bug; confirm which key the
        # front-end graph code expects.
        session_json = json.dumps({
            'committees': '',
            'points': '',
            'drs': '',
            'total_points': '0',
            'type_point': '',
            'type_dr': '',
            'ppm': '',
        })
    else:
        total_points = all_points.count()
        type_point = points.count()
        type_dr = drs.count()
        # Elapsed minutes between the first and the most recent point.
        first_point = all_points.first().timestamp
        latest_point = all_points.last().timestamp
        time_diff = latest_point - first_point
        minutes = (time_diff.days * 1440) + (time_diff.seconds / 60)
        # mpp = minutes per point, rounded for display below.
        if total_points > 0:
            mpp = Decimal(minutes) / Decimal(total_points)
        else:
            mpp = 0
        #For each committee,
        for committee in committees:
            #Let c be the name
            c = committee.name
            #p be the count of points
            p = points.filter(committee_by=committee).count()
            #and d be the count of DRs.
            d = drs.filter(committee_by=committee).count()
            #Append each newly made variable to our nice lists.
            committee_list.append(c)
            points_list.append(p)
            drs_list.append(d)
        #Finally output the result as JSON
        session_json = json.dumps({
            'committees': committee_list,
            'points': points_list,
            'drs': drs_list,
            'total_points': total_points,
            'type_point': type_point,
            'type_dr': type_dr,
            'mpp': str(round(mpp, 3)),
        })
    return HttpResponse(session_json, content_type='json')
| gpl-3.0 | -2,847,598,486,814,620,000 | 36.38961 | 93 | 0.609239 | false | 3.72445 | false | false | false |
quantumlib/Cirq | cirq-google/cirq_google/devices/xmon_device.py | 1 | 8259 | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, cast, Iterable, List, Optional, Set, TYPE_CHECKING, FrozenSet
import cirq
from cirq_google.optimizers import convert_to_xmon_gates
if TYPE_CHECKING:
import cirq
@cirq.value_equality
class XmonDevice(cirq.Device):
    """A device with qubits placed in a grid. Neighboring qubits can interact."""
    def __init__(
        self,
        measurement_duration: cirq.DURATION_LIKE,
        exp_w_duration: cirq.DURATION_LIKE,
        exp_11_duration: cirq.DURATION_LIKE,
        qubits: Iterable[cirq.GridQubit],
    ) -> None:
        """Initializes the description of an xmon device.
        Args:
            measurement_duration: The maximum duration of a measurement.
            exp_w_duration: The maximum duration of an ExpW operation.
            exp_11_duration: The maximum duration of an ExpZ operation.
            qubits: Qubits on the device, identified by their x, y location.
        """
        self._measurement_duration = cirq.Duration(measurement_duration)
        self._exp_w_duration = cirq.Duration(exp_w_duration)
        self._exp_z_duration = cirq.Duration(exp_11_duration)
        self.qubits = frozenset(qubits)
    def qubit_set(self) -> FrozenSet[cirq.GridQubit]:
        """Returns the (frozen) set of qubits on this device."""
        return self.qubits
    def decompose_operation(self, operation: cirq.Operation) -> cirq.OP_TREE:
        """Decomposes an operation into the xmon native gate set."""
        return convert_to_xmon_gates.ConvertToXmonGates().convert(operation)
    def neighbors_of(self, qubit: cirq.GridQubit):
        """Returns the qubits that the given qubit can interact with."""
        # Grid adjacency: the four orthogonal neighbors, filtered to those
        # actually present on the device.
        possibles = [
            cirq.GridQubit(qubit.row + 1, qubit.col),
            cirq.GridQubit(qubit.row - 1, qubit.col),
            cirq.GridQubit(qubit.row, qubit.col + 1),
            cirq.GridQubit(qubit.row, qubit.col - 1),
        ]
        return [e for e in possibles if e in self.qubits]
    def duration_of(self, operation):
        """Returns the duration of the given operation on this device.
        Raises:
            ValueError: if the gate type is not supported.
        """
        if isinstance(operation.gate, cirq.CZPowGate):
            return self._exp_z_duration
        if isinstance(operation.gate, cirq.MeasurementGate):
            return self._measurement_duration
        if isinstance(operation.gate, (cirq.XPowGate, cirq.YPowGate, cirq.PhasedXPowGate)):
            return self._exp_w_duration
        if isinstance(operation.gate, cirq.ZPowGate):
            # Z gates are performed in the control software.
            return cirq.Duration()
        raise ValueError(f'Unsupported gate type: {operation!r}')
    @classmethod
    def is_supported_gate(cls, gate: cirq.Gate):
        """Returns true if the gate is allowed."""
        return isinstance(
            gate,
            (
                cirq.CZPowGate,
                cirq.XPowGate,
                cirq.YPowGate,
                cirq.PhasedXPowGate,
                cirq.MeasurementGate,
                cirq.ZPowGate,
            ),
        )
    def validate_gate(self, gate: cirq.Gate):
        """Raises an error if the given gate isn't allowed.
        Raises:
            ValueError: Unsupported gate.
        """
        if not self.is_supported_gate(gate):
            raise ValueError(f'Unsupported gate type: {gate!r}')
    def validate_operation(self, operation: cirq.Operation):
        """Checks gate type, qubit type/membership, and 2-qubit adjacency.
        Raises:
            ValueError: for unsupported operations, off-device qubits, or
                non-adjacent two-qubit interactions.
        """
        if not isinstance(operation, cirq.GateOperation):
            raise ValueError(f'Unsupported operation: {operation!r}')
        self.validate_gate(operation.gate)
        for q in operation.qubits:
            if not isinstance(q, cirq.GridQubit):
                raise ValueError(f'Unsupported qubit type: {q!r}')
            if q not in self.qubits:
                raise ValueError(f'Qubit not on device: {q!r}')
        # Two-qubit gates (other than multi-qubit measurements) require the
        # qubits to be grid-adjacent.
        if len(operation.qubits) == 2 and not isinstance(operation.gate, cirq.MeasurementGate):
            p, q = operation.qubits
            if not cast(cirq.GridQubit, p).is_adjacent(q):
                raise ValueError(f'Non-local interaction: {operation!r}.')
    def _check_if_exp11_operation_interacts_with_any(
        self, exp11_op: cirq.GateOperation, others: Iterable[cirq.GateOperation]
    ) -> bool:
        return any(self._check_if_exp11_operation_interacts(exp11_op, op) for op in others)
    def _check_if_exp11_operation_interacts(
        self, exp11_op: cirq.GateOperation, other_op: cirq.GateOperation
    ) -> bool:
        # Single-qubit gates and measurements never cross-talk with a CZ.
        if isinstance(
            other_op.gate,
            (
                cirq.XPowGate,
                cirq.YPowGate,
                cirq.PhasedXPowGate,
                cirq.MeasurementGate,
                cirq.ZPowGate,
            ),
        ):
            return False
        # Otherwise the ops interact if any qubit pair is grid-adjacent.
        return any(
            cast(cirq.GridQubit, q).is_adjacent(cast(cirq.GridQubit, p))
            for q in exp11_op.qubits
            for p in other_op.qubits
        )
    def validate_circuit(self, circuit: cirq.Circuit):
        super().validate_circuit(circuit)
        _verify_unique_measurement_keys(circuit.all_operations())
    def validate_moment(self, moment: cirq.Moment):
        """Additionally forbids CZ operations adjacent to each other in a moment."""
        super().validate_moment(moment)
        for op in moment.operations:
            if isinstance(op.gate, cirq.CZPowGate):
                for other in moment.operations:
                    if other is not op and self._check_if_exp11_operation_interacts(
                        cast(cirq.GateOperation, op), cast(cirq.GateOperation, other)
                    ):
                        raise ValueError(f'Adjacent Exp11 operations: {moment}.')
    def can_add_operation_into_moment(self, operation: cirq.Operation, moment: cirq.Moment) -> bool:
        """True if the operation fits into the moment without CZ cross-talk."""
        self.validate_moment(moment)
        if not super().can_add_operation_into_moment(operation, moment):
            return False
        if isinstance(operation.gate, cirq.CZPowGate):
            return not self._check_if_exp11_operation_interacts_with_any(
                cast(cirq.GateOperation, operation),
                cast(Iterable[cirq.GateOperation], moment.operations),
            )
        return True
    def at(self, row: int, col: int) -> Optional[cirq.GridQubit]:
        """Returns the qubit at the given position, if there is one, else None."""
        q = cirq.GridQubit(row, col)
        return q if q in self.qubits else None
    def row(self, row: int) -> List[cirq.GridQubit]:
        """Returns the qubits in the given row, in ascending order."""
        return sorted(q for q in self.qubits if q.row == row)
    def col(self, col: int) -> List[cirq.GridQubit]:
        """Returns the qubits in the given column, in ascending order."""
        return sorted(q for q in self.qubits if q.col == col)
    def __repr__(self) -> str:
        # NOTE(review): there appears to be a missing ', ' between
        # exp_11_duration and qubits in the rendered repr -- confirm upstream.
        return (
            'XmonDevice('
            f'measurement_duration={self._measurement_duration!r}, '
            f'exp_w_duration={self._exp_w_duration!r}, '
            f'exp_11_duration={self._exp_z_duration!r} '
            f'qubits={sorted(self.qubits)!r})'
        )
    def __str__(self) -> str:
        # ASCII/unicode grid drawing of qubits with lines between neighbors.
        diagram = cirq.TextDiagramDrawer()
        for q in self.qubits:
            diagram.write(q.col, q.row, str(q))
            for q2 in self.neighbors_of(q):
                diagram.grid_line(q.col, q.row, q2.col, q2.row)
        return diagram.render(horizontal_spacing=3, vertical_spacing=2, use_unicode_characters=True)
    def _value_equality_values_(self) -> Any:
        # Values used by @cirq.value_equality for __eq__/__hash__.
        return (self._measurement_duration, self._exp_w_duration, self._exp_z_duration, self.qubits)
def _verify_unique_measurement_keys(operations: Iterable[cirq.Operation]):
    """Raise ValueError when two measurement operations share the same key."""
    keys_so_far: Set[str] = set()
    for operation in operations:
        if not cirq.is_measurement(operation):
            continue
        key = cirq.measurement_key(operation)
        if key in keys_so_far:
            raise ValueError(f'Measurement key {key} repeated')
        keys_so_far.add(key)
| apache-2.0 | -370,696,558,131,876,600 | 37.957547 | 100 | 0.615934 | false | 3.713579 | false | false | false |
kangwonlee/ECA | lab_07_linear_algebra/linear_algebra.py | 1 | 2220 | # -*- coding: utf8 -*-
# 위 주석은 이 .py 파일 안에 한글이 사용되었다는 점을 표시하는 것임
def dot(a, b):
    """
    Dot (inner) product of two same-length vectors a and b.

    Returns a float (the accumulator starts at 0.0). If b is shorter than
    a an IndexError is raised, exactly as before.

    Fix: xrange() is Python-2-only; range() behaves identically here and
    also runs on Python 3.
    """
    n = len(a)
    result = 0.0
    for i in range(n):
        result += a[i] * b[i]
    return result
def multiply_matrix_vector(A, x):
    """
    Multiply matrix A (a list of rows) by vector x.

    Returns a list with one float per row of A (the row's dot product
    with x); the 0.0 start value keeps the result float-typed even for
    integer inputs, matching the previous dot()-based behavior.

    Fix: replaces the Python-2-only xrange()/manual-index loop with a
    comprehension that runs on Python 2 and 3.
    """
    return [sum((a_ij * x_j for a_ij, x_j in zip(row, x)), 0.0) for row in A]
def multiply_matrix_matrix(A, B):
    """
    Multiply matrix A (n_row x n_dummy) by matrix B (n_dummy x n_column).

    Returns the product as a list of row lists of floats, or None (after
    printing a message) when the inner dimensions do not match.

    Fixes: the Python-2-only 'print' statement and xrange() are replaced
    with forms that work on both Python 2 and 3 without changing behavior.
    """
    n_row = len(A)
    n_column = len(B[0])
    n_dummy = len(A[0])
    # Inner dimensions must agree: columns of A == rows of B.
    if n_dummy != len(B):
        print("Incorrect Matrix Size")
        return None
    # Pre-fill the result with 0.0 so accumulation keeps float typing.
    result = [[0.0] * n_column for _ in range(n_row)]
    for i in range(n_row):
        for j in range(n_column):
            # dummy index k runs over the shared inner dimension
            for k in range(n_dummy):
                result[i][j] += A[i][k] * B[k][j]
    return result
def main():
    # Demo driver: exercises dot, multiply_matrix_vector and
    # multiply_matrix_matrix with small 2x2 examples.
    # NOTE: this function uses Python-2 print statements throughout; the
    # whole file is Python 2.
    a_vector = [1.0, 0.0]
    b_vector = [3.0, 4.0]
    a_dot_b = dot(a_vector, b_vector)
    print "a =", a_vector
    print "b =", b_vector
    print "a dot b =", a_dot_b
    A_matrix = [[0.0, 1.0],
                [1.0, 0.0]]
    x_vector = [3.0, 4.0]
    A_x = multiply_matrix_vector(A_matrix, x_vector)
    print "A =", A_matrix
    print "x =", x_vector
    print "A*x =", A_x
    A_matrix2 = [[0.0, 1.0],
                 [1.0, 0.0]]
    # zip(*rows) transposes: turns the 1x2 row vector into a 2x1 column.
    x_vector2T = [[3.0, 4.0]]
    x_vector2 = zip(*x_vector2T)
    A_x2 = multiply_matrix_matrix(A_matrix2, x_vector2)
    print "A2 =", A_matrix2
    print "x2 =", x_vector2
    print "A2*x2 =", A_x2
    B_matrix = [[100, 101],
                [110, 111]]
    print "A =", A_matrix
    print "B =", B_matrix
    print "A*B =", multiply_matrix_matrix(A_matrix, B_matrix)
if "__main__" == __name__:
    main()
| apache-2.0 | 2,096,145,424,285,517,800 | 19.222222 | 61 | 0.484016 | false | 2.100735 | false | false | false |
bitsydarel/DebiaTranslator | setup.py | 1 | 1255 | import sys
import os
from cx_Freeze import setup, Executable
# cx_Freeze build script for the "Devia Translator" tkinter application.
# NOTE(review): the Tcl/Tk paths below are hard-coded to one developer's
# Windows machine; the build only works on a matching layout.
os.environ['TCL_LIBRARY'] = r"C:\Users\Sasha\AppData\Local\Programs\Python\Python36-32\tcl\tcl8.6"
os.environ['TK_LIBRARY'] = r"C:\Users\Sasha\AppData\Local\Programs\Python\Python36-32\tcl\tk8.6"
includes = ["tkinter"]
# DLLs and assets copied next to the frozen executable.
include_files = [r"C:\Users\Sasha\AppData\Local\Programs\Python\Python36-32\DLLs\tcl86t.dll", \
                 r"C:\Users\Sasha\AppData\Local\Programs\Python\Python36-32\DLLs\tk86t.dll",
                 r"C:\Users\Sasha\AppData\Local\Programs\Python\Python36-32\DLLs\sqlite3.dll",
                 r"C:\Users\Sasha\Desktop\DebiaTranslator-master\zoom_logo.jpg"]
# Dependencies are automatically detected, but it might need fine tuning.
# NOTE(review): build_exe_options is defined but never passed to setup();
# the options dict below is built inline instead -- confirm intent.
build_exe_options = {"includes": ["tkinter", "os", "platform"]}
# GUI applications require a different base on Windows (the default is for a
# console application).
base = None
if sys.platform == "win32":
    base = "Win32GUI"
setup( name = "Devia Translator",
       version = "0.1",
       description = "Devia Translator application!",
       options = {"build_exe": {"includes": includes, "include_files": include_files}},
       executables = [Executable("devia_translator.py", base=base)])
matokeotz/matokeo-api | app/api/models/school_models.py | 1 | 1204 | from __future__ import unicode_literals
from django.db import models
from datetime import datetime
from django.utils import timezone
from django.db.models import Model, CharField, ForeignKey, IntegerField, DecimalField, BooleanField, DateTimeField
class School(Model):
    '''
    Stores information on a single school.
    '''
    # cno: school identification number -- presumably a centre number;
    # confirm against the data source.
    cno = CharField(max_length=10, null=False)
    # Unique short name, indexed for fast lookup.
    short_name = CharField(max_length=50, null=False, unique=True, db_index=True)
    long_name = CharField(max_length=100, null=False)
    # Two-letter codes; defaults: "MF" (mixed gender), "U" (unknown).
    gender = CharField(max_length=2, null=False, default="MF")
    stype = CharField(max_length=2, null=False, default="U")
    level = CharField(max_length=2, null=False, default="U")
    def __str__(self):
        return self.short_name
    def __unicode__(self):
        return self.short_name
class SchoolStatistics(Model):
    '''
    Stores the years for which data is available
    for a specific school. Relates to
    :model:`api.School`.
    '''
    school = ForeignKey(School)
    # Exam year, indexed for per-year queries.
    exam_year = IntegerField(db_index=True)
    # Division name and how many students achieved it that year.
    div_name = CharField(max_length=10, null=False)
    div_count = IntegerField()
    def __str__(self):
        return str(self.school)
hovo1990/deviser | generator/java_utils/insideJSBML_parser.py | 1 | 9517 | #!/usr/bin/env python
#
# @file insideJSBML_parser.py
# @brief JSBML classes parser using javap for GSoC 2016
# @author Hovakim Grabski
#
# <!--------------------------------------------------------------------------
#
# Copyright (c) 2013-2015 by the California Institute of Technology
# (California, USA), the European Bioinformatics Institute (EMBL-EBI, UK)
# and the University of Heidelberg (Germany), with support from the National
# Institutes of Health (USA) under grant R01GM070923. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# Neither the name of the California Institute of Technology (Caltech), nor
# of the European Bioinformatics Institute (EMBL-EBI), nor of the University
# of Heidelberg, nor the names of any contributors, may be used to endorse
# or promote products derived from this software without specific prior
# written permission.
# ------------------------------------------------------------------------ -->
import os
import sys
import time
import subprocess as sub
file_path = os.path.dirname(os.path.abspath(__file__))
jsbml_jar = 'jsbml-1.1-with-dependencies.jar'
curr_dir = os.getcwd()
def print_output(output):
    """Print every entry of *output* on its own line (debug helper)."""
    for entry in output:
        print(entry)
# Clean string line from '' and return list
def clean_line(data):
temp = []
for i in data:
if i != '':
temp.append(i)
return temp
def extract_data(temp_data):
    """Parse one tokenized javap output line into a method description dict.

    *temp_data* is a list of whitespace-split tokens (already cleaned by
    clean_line). Returns None for non-method lines (compiled-from header,
    closing brace, or when no function name was found); otherwise a dict
    with accessType, isAbstract, returnType, functionName, functionArgs,
    of_type (generic container name), of_type_args, and originalData.
    NOTE(review): the inner 'for y' scan loops re-split the same token
    each iteration -- looks like dead/duplicated logic; behavior kept as-is.
    """
    # print('temp_data ',temp_data)
    # function_name_step1 = temp_data[-1].split(');')
    # print(function_name_step1)
    function_name = ''
    access_type = None
    is_abstract = False
    return_type = []
    arguments = []
    of_type = ''
    of_type_args = []
    # TODO this is the part that includes extends module
    if len(temp_data) == 1 and temp_data[-1] == '}':
        return
    for i in range(len(temp_data)):
        if temp_data[0] == 'Compiled':
            return
        if len(temp_data) == 1 and temp_data[-1] == '}':
            return
        # Function Arguments extracter
        if '(' in temp_data[i]:
            # print('i is ',i)
            function_name_step1 = temp_data[i].split('(')
            # print('function_name_step1 ',function_name_step1)
            function_name = function_name_step1[0]
            function_index = i
            if function_name_step1[-1] != ');':
                if ');' in function_name_step1[-1]:
                    arg = function_name_step1[-1].split(');')[0]
                    arguments.append(arg)
                else:
                    arg = function_name_step1[-1].split(',')[0]
                    arguments.append(arg)
                for y in range(function_index, len(temp_data)):
                    # print('y ',temp_data[y])
                    if ',' in temp_data[y]:
                        arg = function_name_step1[-1].split(',')[0]
                        arguments.append(arg)
                    elif ');' in function_name_step1[-1]:
                        arg = function_name_step1[-1].split(');')[0]
                        arguments.append(arg)
                    elif function_name_step1[-1] == ');':
                        break
        elif '<' in temp_data[i]:
            # Generic type, e.g. List<Something> -> of_type='List'
            type_of_name_step1 = temp_data[i].split('<')
            of_type = type_of_name_step1[0]
            type_index = i
            if type_of_name_step1[-1] != '>':
                if '>' in type_of_name_step1[-1]:
                    arg = type_of_name_step1[-1].split('>')[0]
                    of_type_args.append(arg)
                else:
                    arg = type_of_name_step1[-1].split(',')[0]
                    of_type_args.append(arg)
                for y in range(type_index, len(temp_data)):
                    # print('y ',temp_data[y])
                    if ',' in temp_data[y]:
                        arg = type_of_name_step1[-1].split(',')[0]
                        of_type_args.append(arg)
                    elif '>' in type_of_name_step1[-1]:
                        arg = type_of_name_step1[-1].split('>')[0]
                        of_type_args.append(arg)
    if len(temp_data) > 0:
        if temp_data[0] in ['public', 'private', 'protected']:
            access_type = temp_data[0]
    if len(temp_data) > 1 and temp_data[1] == 'abstract':
        is_abstract = True
        return_type = temp_data[2]
    elif len(temp_data) > 1:
        if temp_data[1] == 'void':
            return_type = temp_data[1]
        else:
            # return_type = temp_data[1]
            return_type = None
    if function_name == '':
        return
    return {'accessType': access_type, 'isAbstract': is_abstract,
            'returnType': return_type, 'functionName': function_name,
            'functionArgs': arguments, 'of_type': of_type,
            'of_type_args': of_type_args, 'originalData': temp_data}
def parse_extends(extends):
    """Build the 'extends' description dict from a tokenized javap header.

    *extends* is the whitespace-split class declaration line. The
    superclass token sits at index 2 for interfaces and index 3 for
    classes; the fully-qualified name is the second-to-last token.
    """
    is_interface = extends[1] == 'interface'
    full_name = extends[-2]
    return {
        'accessType': extends[0],
        'extendsOriginal': extends[2] if is_interface else extends[3],
        'isInterface': is_interface,
        'isClass': extends[1] == 'class',
        'extendsFull': full_name,
        # Short name = last dotted component of the qualified name.
        'extendsShort': full_name.split('.')[-1],
        'fullText': extends,
    }
def parse_output(output):
    """Turn javap's stdout lines into {'extends': ..., 'modules': [...]}.

    Each line is tokenized; declaration lines containing 'extends' feed
    parse_extends (last one wins), and every line is run through
    extract_data, collecting non-None method dicts under 'modules'.
    """
    final_data = {}
    output_data = []
    for line in output:
        # print(line)
        data_stage1 = line.split('\n')
        # print(data_stage1)
        data_stage2 = data_stage1[0].split(' ')
        # Need to catch extend here
        if 'extends' in data_stage2:
            final_data.update({'extends': parse_extends(data_stage2)})
        temp_data = clean_line(data_stage2)
        data = extract_data(temp_data)
        if data is not None:
            output_data.append(data)
    final_data.update({'modules': output_data})
    return final_data  # output_data
def get_class_information(class_name=None, individual_run=False, extract_data=False):
    """Run ``javap`` against the bundled JSBML jar and parse the result.

    Returns the dict from parse_output on success, or None when the class
    is AbstractSBasePlugin, not found, or (with extract_data=True) on any
    failure. With extract_data=False a missing javap terminates the
    program via sys.exit(0).
    NOTE(review): the *extract_data* parameter shadows the module-level
    extract_data() function inside this body; *individual_run* is unused.
    """
    if class_name == 'AbstractSBasePlugin':
        # class_name = 'org.sbml.jsbml.ext.{0}'.format(class_name)
        return
    else:
        # Qualify the bare class name with the JSBML core package.
        class_name = 'org.sbml.jsbml.{0}'.format(class_name)
    # Old version
    # command = 'javap -cp {0}{1}{2} -package {3}'.format(file_path, os.sep, jsbml_jar, class_name)
    # TODO inside JSBML parser debugging test
    # comm1 = 'javap_wrong'
    comm1 = 'javap'
    comm2 = '-cp'
    comm3 = '{0}{1}{2}'.format(file_path, os.sep, jsbml_jar)
    comm4 = '-package'
    comm5 = '{0}'.format(class_name)
    total_command = [comm1, comm2, comm3, comm4, comm5]
    try:
        class_info = sub.Popen(total_command, stdout=sub.PIPE, stderr=sub.PIPE)
        stdout, stderr = class_info.communicate()
        if stdout:
            # For debugging purposes
            # print(stdout)
            stdout_value = stdout.decode()  # decode("utf-8")
            class_output = stdout_value.split('\n')
            dict_data = parse_output(class_output)
            return dict_data
        elif stderr:
            error_txt = stderr.decode()
            # print('ERROR is', error_txt)
            if 'Error: class not found:' in error_txt:
                return
            else:
                if extract_data is False:
                    print('Check if Java SDK is installed, deviser requires javap')
                    sys.exit(0)
                else:
                    return
    except Exception as error:
        # javap missing or Popen failure.
        if extract_data is False:
            print('Error is ', error)
            print('Check if Java SDK is installed, deviser requires javap')
            sys.exit(0)
# For testing purposes
# class_name = 'org.sbml.jsbml.AbstractNamedSBase'
# class_name = 'CompartmentalizedSBase'
# class_name = 'Compartment'
# class_name = 'SBaseWithDerivedUnit'
# class_name = 'NamedSBaseWithDerivedUnit'
# class_name = 'UniqueNamedSBase'
# TODO for individual tests of javap parser
# #Exist but no data
# class_name = 'AbstractSBasePlugin'
# data = get_class_information(class_name, individual_run=True)
# print(data)
# data = get_class_information(class_name, individual_run=True)
# print(data)
| lgpl-2.1 | 6,224,505,022,625,598,000 | 33.988971 | 99 | 0.572449 | false | 3.732157 | false | false | false |
calancha/DIRAC | Interfaces/scripts/dirac-wms-get-queue-normalization.py | 15 | 1254 | #!/usr/bin/env python
########################################################################
# $HeadURL$
# File : dirac-wms-get-queue-normalization.py
# Author : Ricardo Graciani
########################################################################
"""
Report Normalization Factor applied by Site to the given Queue
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Base import Script
from DIRAC.WorkloadManagementSystem.Client.CPUNormalization import getQueueNormalization
# Register the usage text (first docstring line + usage/arguments help),
# then parse options; positional args are GlueCEUniqueIDs of queues.
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ... Queue ...' % Script.scriptName,
                                     'Arguments:',
                                     '  Queue:     GlueCEUniqueID of the Queue (ie, juk.nikhef.nl:8443/cream-pbs-lhcb)' ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
if len( args ) < 1:
  Script.showHelp()
# Report the normalization factor for each requested queue; exit code 2
# flags that at least one lookup failed.
exitCode = 0
for ceUniqueID in args:
  cpuNorm = getQueueNormalization( ceUniqueID )
  if not cpuNorm['OK']:
    print 'ERROR %s:' % ceUniqueID, cpuNorm['Message']
    exitCode = 2
    continue
  print ceUniqueID, cpuNorm['Value']
DIRAC.exit( exitCode )
| gpl-3.0 | 5,508,469,330,396,502,000 | 30.35 | 125 | 0.532695 | false | 3.955836 | false | false | false |
sedwards2009/extraterm | extraterm/src/commands/exshow.py | 1 | 4635 | #!/usr/bin/env python3
#
# Copyright 2014-2017 Simon Edwards <[email protected]>
#
# This source code is licensed under the MIT license which is detailed in the LICENSE.txt file.
#
import argparse
import atexit
import base64
import hashlib
import os
import os.path
import sys
import termios
##@inline
from extratermclient import extratermclient
MAX_CHUNK_BYTES = 3 * 1024 # This is kept a multiple of 3 to avoid padding in the base64 representation.
def SendMimeTypeDataFromFile(filename, mimeType, charset, filenameMeta=None, download=False):
    """Stream *filename* to the terminal as an Extraterm file transfer.

    filenameMeta, when given, overrides the name reported in the transfer
    metadata. The real on-disk size is sent so the terminal can show
    progress.
    """
    filesize = os.path.getsize(filename)
    with open(filename,'rb') as fhandle:
        SendMimeTypeData(fhandle,
            filename if filenameMeta is None else filenameMeta,
            mimeType,
            charset,
            filesize=filesize,
            download=download)
def SendMimeTypeDataFromStdin(mimeType, charset, filenameMeta=None, download=False):
    """Stream stdin (binary) to the terminal as an Extraterm file transfer.

    BUG FIX: ``download`` was previously passed positionally, so it landed
    in SendMimeTypeData's ``filesize`` parameter -- the download flag was
    silently dropped (and a bogus size sent). Pass it by keyword, leaving
    filesize at its default -1 (unknown, as stdin has no size).
    """
    SendMimeTypeData(sys.stdin.buffer, filenameMeta, mimeType, charset, download=download)
def SendMimeTypeData(fhandle, filename, mimeType, charset, filesize=-1, download=False):
    """Send *fhandle*'s bytes to Extraterm using its file-transfer protocol.

    Each chunk is emitted as ``D:<base64>:<sha256 hex>``, where the hash
    chains: it covers the previous chunk's raw digest followed by this
    chunk's bytes, so the terminal can detect loss/reordering. The stream
    ends with ``E::<sha256 of the last digest>``.
    """
    TurnOffEcho()
    extratermclient.startFileTransfer(mimeType, charset, filename, filesize=filesize, download=download)
    contents = fhandle.read(MAX_CHUNK_BYTES)
    previousHash = b""
    # NOTE(review): previousHashHex is written but never read -- dead state.
    previousHashHex = ""
    while len(contents) != 0:
        hash = hashlib.sha256()
        hash.update(previousHash)
        hash.update(contents)
        print("D:", end='')
        print(base64.b64encode(contents).decode(), end='')
        print(":", end='')
        previousHashHex = hash.hexdigest()
        print(previousHashHex)
        previousHash = hash.digest()
        contents = fhandle.read(MAX_CHUNK_BYTES)
    # Terminator frame: hash chained over the final digest only.
    print("E::", end='')
    hash = hashlib.sha256()
    hash.update(previousHash)
    print(hash.hexdigest())
    extratermclient.endFileTransfer()
def ShowFile(filename, mimeType=None, charset=None, filenameMeta=None, download=False):
    """Send *filename* to the terminal; return 0 on success, 3 if missing."""
    # Guard clause: bail out early when the file does not exist.
    if not os.path.exists(filename):
        print("Unable to open file {0}.".format(filename))
        return 3
    SendMimeTypeDataFromFile(filename, mimeType, charset, filenameMeta, download)
    return 0
def ShowStdin(mimeType=None, charset=None, filenameMeta=None, download=False):
    # Thin wrapper: stream stdin to the terminal. Returns None (the caller
    # in main() propagates it as the exit status).
    SendMimeTypeDataFromStdin(mimeType, charset, filenameMeta, download)
def TurnOffEcho():
    """Disable terminal echo on stdin for the transfer, restoring at exit.

    No-op when stdin is not a tty (e.g. piped input).
    """
    # Turn off echo on the tty.
    fd = sys.stdin.fileno()
    if not os.isatty(fd):
        return
    old_settings = termios.tcgetattr(fd)
    new_settings = termios.tcgetattr(fd)
    new_settings[3] = new_settings[3] & ~termios.ECHO  # lflags
    termios.tcsetattr(fd, termios.TCSADRAIN, new_settings)
    # Set up a hook to restore the tty settings at exit.
    def restoreTty():
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        sys.stderr.flush()
    atexit.register(restoreTty)
def main():
    """Entry point for the ``show`` command.

    Parses arguments, verifies Extraterm is the host terminal, then shows
    (or downloads) each named file -- or stdin when no files are given.
    Returns a process exit status: 0 on success, 1 when not running under
    Extraterm, 3 when a file is missing.
    """
    parser = argparse.ArgumentParser(prog='show', description='Show a file inside Extraterm.')
    parser.add_argument('--charset', dest='charset', action='store', default=None, help='the character set of the input file (default: UTF8)')
    parser.add_argument('-d', '--download', dest='download', action='store_true', default=None, help='download the file and don\'t show it')
    parser.add_argument('--mimetype', dest='mimetype', action='store', default=None, help='the mime-type of the input file (default: auto-detect)')
    parser.add_argument('--filename', dest='filename', action='store', default=None, help='sets the file name in the metadata sent to the terminal (useful when reading from stdin).')
    parser.add_argument('-t', '--text', dest='text', action='store_true', default=None, help='Treat the file as plain text.')
    parser.add_argument('files', metavar='file', type=str, nargs='*', help='file name. The file data is read from stdin if no files are specified.')
    args = parser.parse_args()
    if not extratermclient.isExtraterm():
        print("Sorry, you're not using Extraterm as your terminal.")
        return 1
    # --text forces the mime type regardless of --mimetype.
    mimetype = args.mimetype
    if args.text:
        mimetype = "text/plain"
    if len(args.files) != 0:
        # Show each file; stop at the first failure and propagate its code.
        for filename in args.files:
            result = ShowFile(filename, mimeType=mimetype, charset=args.charset, filenameMeta=args.filename,
                              download=args.download)
            if result != 0:
                return result
        return 0
    else:
        return ShowStdin(mimeType=mimetype, charset=args.charset, filenameMeta=args.filename,
                         download=args.download)
# BUG FIX: main()'s return value was previously discarded, so the error
# statuses it computes (1, 3) never reached the shell. Propagate them.
sys.exit(main())
| mit | -2,375,877,742,022,258,000 | 38.956897 | 182 | 0.66904 | false | 3.901515 | false | false | false |
Ecpy/i3py | i3py/core/job.py | 1 | 3540 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2018 by I3py Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Object used to handle operation that takes a long time to complete.
"""
import time
from typing import Callable, Optional
class InstrJob(object):
    """Object returned by instrument starting a long running job.
    This object can also be used inside a method to handle the waiting of a
    condition.
    Parameters
    ----------
    condition_callable : Callable
        Callable taking no argument and indicating if the job is complete.
    expected_waiting_time : float
        Expected waiting time for the task to complete in seconds.
    cancel : Callable, optional
        Function to cancel the task. The job will pass it all the argument it
        is called with and the function return value will be returned.
    """
    def __init__(self,
                 condition_callable: Callable[[], bool],
                 expected_waiting_time: float,
                 cancel: Optional[Callable]=None) -> None:
        self.condition_callable = condition_callable
        self.expected_waiting_time = expected_waiting_time
        self._cancel = cancel
        # The expected-waiting-time clock starts at construction, not at
        # the wait_for_completion call.
        self._start_time = time.time()
    def wait_for_completion(self,
                            break_condition_callable:
                                Optional[Callable[[], bool]]=None,
                            timeout: float=15,
                            refresh_time: float=1) -> bool:
        """Wait for the task to complete.
        Parameters
        ----------
        break_condition_callable : Callable, optional
            Callable indicating that we should stop waiting.
        timeout : float, optional
            Time to wait in seconds in addition to the expected condition time
            before breaking.
        refresh_time : float, optional
            Time interval at which to check the break condition.
        Returns
        -------
        result : bool
            Boolean indicating if the wait succeeded of was interrupted.
        """
        if break_condition_callable is None:
            # no_check returns None (falsy), so it never interrupts the wait.
            def no_check():
                pass
            break_condition_callable = no_check
        # Phase 1: sleep out the expected waiting time, polling only the
        # break condition (sleeps are clamped to the time remaining).
        while True:
            remaining_time = (self.expected_waiting_time -
                              (time.time() - self._start_time))
            if remaining_time <= 0:
                break
            time.sleep(min(refresh_time, remaining_time))
            if break_condition_callable():
                return False
        if self.condition_callable():
            return True
        # Phase 2: poll the completion condition for up to *timeout* more
        # seconds before giving up.
        timeout_start = time.time()
        while True:
            remaining_time = (timeout -
                              (time.time() - timeout_start))
            if remaining_time < 0:
                return False
            time.sleep(min(refresh_time, remaining_time))
            if self.condition_callable():
                return True
            if break_condition_callable():
                return False
    def cancel(self, *args, **kwargs):
        """Cancel the long running job.
        """
        if not self._cancel:
            raise RuntimeError('No callable was provided to cancel the task.')
        return self._cancel(*args, **kwargs)
| bsd-3-clause | 6,820,111,772,755,745,000 | 33.038462 | 79 | 0.546893 | false | 5.275708 | false | false | false |
RuiShu/Neural-Net-Bayesian-Optimization | sequential/seq_optimizer.py | 1 | 2795 | """
@Author: Rui Shu
@Date: 4/21/15
Performs sequential optimization.
"""
import time
from learning_objective.hidden_function import evaluate, true_evaluate, get_settings
import matplotlib.pyplot as plt
import utilities.optimizer as op
import numpy as np
# Open file to write times for comparison
# NOTE: this script is Python 2 (print statements below).
file_record = open("data/seq_time_data.csv", "a")
# Freeze plotting
plot_it = False
print_statements = False
# Get settings relevant to the hidden function being used
lim_domain, init_size, additional_query_size, init_query, domain, selection_size = get_settings()
# Construct the dataset
dataset = evaluate(init_query[0,:], lim_domain)
print "Randomly query a set of initial points... ",
for query in init_query[1:,:]:
    dataset = np.concatenate((dataset, evaluate(query, lim_domain)), axis=0)
print "Complete initial dataset acquired"
# Begin sequential optimization using NN-LR based query system
optimizer = op.Optimizer(dataset, domain)
optimizer.train()
# Select a series of points to query
selected_points = optimizer.select_multiple(selection_size) # (#points, m) array
selection_index = 0
t0 = time.time()
print "Performing optimization..."
for i in range(additional_query_size):
    # When the current batch of selected points is exhausted, retrain the
    # linear-regression head, pick a fresh batch, and log the batch time.
    if selection_index == selection_size:
        # Update optimizer's dataset and retrain LR
        optimizer.retrain_LR()
        selected_points = optimizer.select_multiple(selection_size) # Select new points
        selection_size = selected_points.shape[0] # Get number of selected points
        selection_index = 0 # Restart index
        info = "%.3f," % (time.time()-t0)
        file_record.write(info)
        t0 = time.time()
    # Retrain the (expensive) neural network every 100 accumulated points.
    if (optimizer.get_dataset().shape[0] % 100) == 0:
        # Retrain the neural network
        optimizer.retrain_NN()
    new_data = evaluate(selected_points[selection_index], lim_domain)
    optimizer.update_data(new_data)
    selection_index += 1
    if print_statements:
        string1 = "Tasks done: %3d. " % (i+1)
        string2 = "New data added to dataset: " + str(new_data)
        print string1 + string2
    else:
        # Progress report every 10% of the run.
        if (i+1) % (additional_query_size/10) == 0:
            print "%.3f completion..." % ((i+1.)/additional_query_size)
info = "%.3f," % (time.time()-t0)
file_record.write(info)
file_record.write("NA\n")
file_record.close()
print "Sequential optimization task complete."
print "Best evaluated point is:"
dataset = optimizer.get_dataset()
print dataset[np.argmax(dataset[:, -1]), :]
print "Predicted best point is:"
optimizer.retrain_LR()
domain, pred, hi_ci, lo_ci, nn_pred, ei, gamma = optimizer.get_prediction()
index = np.argmax(pred[:, 0])
print np.concatenate((np.atleast_2d(domain[index, :]), np.atleast_2d(pred[index, 0])), axis=1)[0, :]
| mit | 5,814,549,672,598,117,000 | 31.5 | 100 | 0.671556 | false | 3.537975 | false | false | false |
prk327/CoAca | Algo - DataStru/linkedlist.py | 1 | 4314 |
#linked lists can have several nodes (these nodes contain the data)
class Node(object):
def __init__(self, data):
#this is where we store the data
self.data = data;
#this is a reference to the next node in the linked list
self.next_node = None;
#implementation of the linked list data structure
class LinkedList(object):
def __init__(self):
#we keep a reference to the first node of the linked list
#this is why we can get the first node in O(1)
self.head = None;
#we track the size of the list (how many itemy we have inserted)
self.size = 0;
#inserting at the beginning
#because we store a reference to the first node (head) thats why we just
#have to update the references [it can be done in O(1) running time]
def insert_start(self, data):
#we insert a new item so the size has changed
self.size = self.size + 1;
#create a new Node
new_node = Node(data);
#if the head is NULL - it means it is the first item we insert
if not self.head:
self.head = new_node;
#if there are already items in the linked list (so not the first item)
else:
#we just have to update the references that why it is fast
new_node.next_node = self.head;
self.head = new_node;
#removing an arbitrary item from the list
#first we have to find the item [O(N)] + update the references (so remove it) [O(1)]
#overall running time complexity is O(N) linear running time
def remove(self, data):
#if the linked list is empty we return
if self.head is None:
return;
#we remove item so decrement the size
self.size = self.size - 1;
#first we have to find the node we want to remove. It can be done in O(N)
#basically a simple linear search
current_node = self.head;
previous_node = None;
#we try to find the node we want to get rid of
while current_node.data != data:
previous_node = current_node;
current_node = current_node.next_node;
#if we want to remove the first item (in this case the previous node is NULL)
#NOTE: if there are no references to a given object then GC will delete that node
#so no need to del the unnecessary nodes
if previous_node is None:
self.head = current_node.next_node;
else:
#we remove an item thats not the first one
previous_node.next_node = current_node.next_node;
#because we have a variable thats why this method has O(1) running time
def size1(self):
return self.size;
#we can calculate the size by iterating through the list and counting the number of nodes
def size2(self):
actual_node = self.head;
size = 0;
#because of this it has O(N) linear running time (we can do better!!!)
while actual_node is not None:
size+=1;
actual_node = actual_node.next_node;
return size;
#we want to insert data at the end of the list
#first we have to get to the end of the list [O(N)] + insert a new node [O(1)]
def insert_end(self, data):
#we insert a new node so update the size
self.size = self.size + 1;
#the new node with the data to insert
new_node = Node(data);
actual_node = self.head;
#we have to find the last node (the last node's next_node is NULL)
while actual_node.next_node is not None:
actual_node = actual_node.next_node;
#we insert the new node as the last node's next node
actual_node.next_node = new_node;
#print the nodes in the linked list
#we consider all the nodes one by one so it has O(N) running time
def traverse_list(self):
actual_node = self.head;
#we consider all the nodes in the linked list
while actual_node is not None:
print("%d " % actual_node.data);
actual_node = actual_node.next_node;
# if __name__ == "__main__":
# linkedlist = LinkedList();
# linkedlist.insert_start(12);
# linkedlist.insert_start(122);
# linkedlist.insert_start(3);
# linkedlist.insert_end(31);
# linkedlist.traverse_list();
# linkedlist.remove(3);
# linkedlist.remove(12);
# linkedlist.remove(122);
# linkedlist.remove(31);
# linkedlist.insert_start(12);
# linkedlist.insert_start(122);
# linkedlist.insert_start(3);
# linkedlist.insert_end(31);
# linkedlist.traverse_list();
# print(linkedlist.size1()); | gpl-3.0 | -5,367,774,084,519,510,000 | 28.828571 | 90 | 0.66713 | false | 3.158126 | false | false | false |
g0v/animal.coa | 基隆市/crawler.py | 1 | 8216 | # -*- coding: utf-8 -*-
import os
import csv
import shutil
import re
import sqlite3
import collections
from urlparse import urlparse, urljoin, parse_qs
from datetime import date
from bs4 import BeautifulSoup
import requests
html_path = "htmls"
image_url = "http://www.klaphio.gov.tw/uploadfiles/cd/"
base_url = "http://www.klaphio.gov.tw/receiving_notice.php"
data_schema = collections.OrderedDict((
(u"進所日期:", "enter_date"),
(u"進所原因:", "reason"),
(u"性別:", "gender"),
(u"毛色:", "color"),
(u"品種:", "variety"),
(u"體型:", "body_type"),
(u"晶片號碼:", "wafer_number"),
(u"來源地點:", "source")
))
class DB(object):
def __init__(self, table_name=None):
if not table_name:
raise Exception("table name invalid")
self.conn = sqlite3.connect('animal.db')
self.conn.row_factory = sqlite3.Row
self.cursor = self.conn.cursor()
self.table_name = table_name
self.github_photo_url = "https://g0v.github.io/animal.coa/%E5%9F%BA%E9%9A%86%E5%B8%82/"
try:
sql = "CREATE TABLE %s (id, photo, %s);" % (self.table_name, ",".join(data_schema.values()))
self.cursor.execute(sql)
print "table %s created." % table_name
except Exception as e:
print e
pass
def get_animal(self, animal_id):
sql = "SELECT * FROM %s WHERE id=?;" % self.table_name
self.cursor.execute(sql, (animal_id,))
return self.cursor.fetchone()
def save(self, data):
try:
print "save data to db, id=", data.get("id")
sql = "INSERT INTO %s (id, photo, color, enter_date, source, gender, reason, wafer_number, body_type, variety) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);" % self.table_name
self.cursor.execute(sql, (
data.get("id"),
data.get("photo"),
data.get("color"),
data.get("enter_date"),
data.get("source"),
data.get("gender"),
data.get("reason"),
data.get("wafer_number"),
data.get("body_type"),
data.get("variety")
))
self.conn.commit()
except Exception as e:
print e
pass
def to_csv(self):
self.cursor.execute("SELECT DISTINCT(enter_date) FROM %s;" % self.table_name)
for (day,) in self.cursor.fetchall():
with open('%s.csv' % day, 'wb') as csvfile:
print "Export csv = %s.csv" % day
spamwriter = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow(["來源地點", "入園日期", "品種", "備註", "性別", "收容原因", "晶片號碼", "毛色", "體型", "相片網址"])
sql = "SELECT * FROM %s WHERE enter_date = ? ORDER BY id;" % self.table_name
self.cursor.execute(sql, (day,))
for row in self.cursor.fetchall():
photo_url = os.path.join(self.github_photo_url, day, row["photo"].split('/')[-1].lower())
y, m, d = tuple(map(int, row["enter_date"].split('-')))
enter_date = "%d年%d月%d日" % (y - 1911, m, d)
data = [
row["source"].encode('utf-8'),
enter_date,
row["variety"].encode('utf-8'),
u"",
row["gender"].encode('utf-8'),
row["reason"].encode('utf-8'),
row["wafer_number"].encode('utf-8'),
row["color"].encode('utf-8'),
row["body_type"].encode('utf-8'),
photo_url
]
spamwriter.writerow(data)
def ensure_directories(path):
if not os.path.exists(path):
os.makedirs(path)
def save_html(path, filename, content):
with open(os.path.join(path, filename), 'w') as f:
f.write(content)
def fetch_page(page=1, total=None):
page = int(page)
print "Fetching page %d" % page
r = requests.post(base_url, {"page": page})
content = r.text.encode('utf-8').strip()
if r.status_code == 200:
save_html(html_path, "page-%d.html" % page, content)
if (page < total):
fetch_page(page + 1, total=total)
else:
return content
def get_total_page(content):
soup = BeautifulSoup(content)
total_page_html = soup.find('a', href="javascript:goPage('5');").get('href')
return int(re.match(r".+goPage\(\'(\d+)\'\)", total_page_html).group(1))
def download_image(filename, animal_id, save_path, save_name):
if not os.path.exists(os.path.join(save_path, save_name)):
print "downloading image, id=", animal_id
ensure_directories(save_path)
r = requests.get(image_url + filename, stream=True)
if r.status_code == 200:
with open(os.path.join(save_path, save_name), 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
else:
print "photo exists, skip. %s/%s" % (save_path, save_name)
def fetch_detail_page(url, animal_id):
try:
with open(os.path.join(html_path, "detail-page-%d.html" % animal_id), 'r') as f:
print "use detail-page-%d.html cached file." % animal_id
content = f.read()
except IOError:
print "fetching detail page, id =", animal_id
r = requests.get(urljoin(base_url, url))
if r.status_code == 200:
content = r.text.encode('utf-8').strip()
save_html(html_path, 'detail-page-%d.html' % animal_id, content)
return extract_detail_info(content)
def extract_detail_info(content):
soup = BeautifulSoup(content)
data = {
"id": animal_id
}
infos = soup.find("div", class_="word").find_all("li")
for info in infos:
title = info.find("span").contents[0]
title = title.replace(" ", "")
if title in data_schema.keys():
animal_info = ""
try:
animal_info = info.contents[1]
except:
pass
data[data_schema[title]] = animal_info
parsed_date = tuple(map(int, data['enter_date'].split('-')))
y, m, d = parsed_date
data['enter_date'] = date(y + 1911, m, d).strftime("%Y-%m-%d")
# download image
img_src = soup.find("div", class_="photo").select("img")[0].get('src').split('/')[-1]
data["photo"] = image_url + img_src
filename, ext = os.path.splitext(img_src)
save_path = data['enter_date']
save_name = filename + ext.lower()
download_image(img_src, animal_id, save_path, save_name)
return data
def extract_animal_id(content):
detail_url = "%s?%s" % (base_url, content.split('?')[-1])
qs = parse_qs(urlparse(detail_url).query)
[animal_id] = qs.get('id')
return int(animal_id)
if __name__ == "__main__":
ensure_directories(html_path)
db = DB(table_name="keelung")
result = fetch_page()
total_pages = get_total_page(result)
print "Total: %d pages" % total_pages
fetch_page(2, total=total_pages)
count = 0
page_files = next(os.walk(html_path))[2]
for page_file in page_files:
if not page_file.startswith('page'):
continue
with open(os.path.join(html_path, page_file), 'r') as f:
content = f.read()
soup = BeautifulSoup(content)
animal_link_list = soup.find("ol", class_="search_img_list").find_all("li")
animal_link_list = [l.find('a').get('href') for l in animal_link_list]
for link in animal_link_list:
count += 1
animal_id = extract_animal_id(link)
animal = db.get_animal(animal_id)
if animal:
print "animal id: %d exists, skip fetch" % animal_id
continue
data = fetch_detail_page(link, animal_id)
db.save(data)
db.to_csv()
print "All %d items." % count
| cc0-1.0 | -2,973,393,827,288,971,000 | 33.118143 | 180 | 0.533886 | false | 3.327572 | false | false | false |
dscorbett/pygments | tests/test_python.py | 1 | 32596 | # -*- coding: utf-8 -*-
"""
Python Tests
~~~~~~~~~~~~
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import pytest
from pygments.lexers import PythonLexer, Python3Lexer
from pygments.token import Token
import re
@pytest.fixture(scope='module')
def lexer2():
yield PythonLexer()
@pytest.fixture(scope='module')
def lexer3():
yield Python3Lexer()
def test_cls_builtin(lexer2):
"""
Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
"""
fragment = 'class TestClass():\n @classmethod\n def hello(cls):\n pass\n'
tokens = [
(Token.Keyword, 'class'),
(Token.Text, ' '),
(Token.Name.Class, 'TestClass'),
(Token.Punctuation, '('),
(Token.Punctuation, ')'),
(Token.Punctuation, ':'),
(Token.Text, '\n'),
(Token.Text, ' '),
(Token.Name.Decorator, '@classmethod'),
(Token.Text, '\n'),
(Token.Text, ' '),
(Token.Keyword, 'def'),
(Token.Text, ' '),
(Token.Name.Function, 'hello'),
(Token.Punctuation, '('),
(Token.Name.Builtin.Pseudo, 'cls'),
(Token.Punctuation, ')'),
(Token.Punctuation, ':'),
(Token.Text, '\n'),
(Token.Text, ' '),
(Token.Keyword, 'pass'),
(Token.Text, '\n'),
]
assert list(lexer2.get_tokens(fragment)) == tokens
def test_needs_name(lexer3):
"""
Tests that '@' is recognized as an Operator
"""
fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
tokens = [
(Token.Name, u'S'),
(Token.Text, u' '),
(Token.Operator, u'='),
(Token.Text, u' '),
(Token.Punctuation, u'('),
(Token.Name, u'H'),
(Token.Text, u' '),
(Token.Operator, u'@'),
(Token.Text, u' '),
(Token.Name, u'beta'),
(Token.Text, u' '),
(Token.Operator, u'-'),
(Token.Text, u' '),
(Token.Name, u'r'),
(Token.Punctuation, u')'),
(Token.Operator, u'.'),
(Token.Name, u'T'),
(Token.Text, u' '),
(Token.Operator, u'@'),
(Token.Text, u' '),
(Token.Name, u'inv'),
(Token.Punctuation, u'('),
(Token.Name, u'H'),
(Token.Text, u' '),
(Token.Operator, u'@'),
(Token.Text, u' '),
(Token.Name, u'V'),
(Token.Text, u' '),
(Token.Operator, u'@'),
(Token.Text, u' '),
(Token.Name, u'H'),
(Token.Operator, u'.'),
(Token.Name, u'T'),
(Token.Punctuation, u')'),
(Token.Text, u' '),
(Token.Operator, u'@'),
(Token.Text, u' '),
(Token.Punctuation, u'('),
(Token.Name, u'H'),
(Token.Text, u' '),
(Token.Operator, u'@'),
(Token.Text, u' '),
(Token.Name, u'beta'),
(Token.Text, u' '),
(Token.Operator, u'-'),
(Token.Text, u' '),
(Token.Name, u'r'),
(Token.Punctuation, u')'),
(Token.Text, u'\n'),
]
assert list(lexer3.get_tokens(fragment)) == tokens
def test_pep_515(lexer3):
"""
Tests that the lexer can parse numeric literals with underscores
"""
fragments = (
(Token.Literal.Number.Integer, u'1_000_000'),
(Token.Literal.Number.Float, u'1_000.000_001'),
(Token.Literal.Number.Float, u'1_000e1_000j'),
(Token.Literal.Number.Hex, u'0xCAFE_F00D'),
(Token.Literal.Number.Bin, u'0b_0011_1111_0100_1110'),
(Token.Literal.Number.Oct, u'0o_777_123'),
)
for token, fragment in fragments:
tokens = [
(token, fragment),
(Token.Text, u'\n'),
]
assert list(lexer3.get_tokens(fragment)) == tokens
def test_walrus_operator(lexer3):
"""
Tests that ':=' is recognized as an Operator
"""
fragment = u'if (a := 2) > 4:'
tokens = [
(Token.Keyword, 'if'),
(Token.Text, ' '),
(Token.Punctuation, '('),
(Token.Name, 'a'),
(Token.Text, ' '),
(Token.Operator, ':='),
(Token.Text, ' '),
(Token.Literal.Number.Integer, '2'),
(Token.Punctuation, ')'),
(Token.Text, ' '),
(Token.Operator, '>'),
(Token.Text, ' '),
(Token.Literal.Number.Integer, '4'),
(Token.Punctuation, ':'),
(Token.Text, '\n'),
]
assert list(lexer3.get_tokens(fragment)) == tokens
def test_fstring(lexer3):
"""
Tests that the lexer can parse f-strings
"""
fragments_and_tokens = (
# examples from PEP-0498
(
"f'My name is {name}, my age next year is {age+1}, my anniversary is {anniversary:%A, %B %d, %Y}.'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'My name is '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'name'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ', my age next year is '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'age'),
(Token.Operator, '+'),
(Token.Literal.Number.Integer, '1'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ', my anniversary is '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'anniversary'),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Single, '%A, %B %d, %Y'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '.'),
(Token.Literal.String.Single, "'"),
(Token.Text, u'\n')
]
), (
"f'He said his name is {name!r}.'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'He said his name is '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'name'),
(Token.Literal.String.Interpol, '!r}'),
(Token.Literal.String.Single, '.'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'input={value:#06x}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'input='),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'value'),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Single, '#06x'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"""f'{"quoted string"}'\n""",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Double, 'quoted string'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"""f'{f"{inner}"}'\n""", # not in the PEP
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'inner'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
# SyntaxError: f-string expression part cannot include a backslash
"f'{\\'quoted string\\'}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Error, '\\'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'quoted string'),
(Token.Literal.String.Escape, "\\'"),
(Token.Literal.String.Single, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'{{ {4*10} }}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Escape, '{{'),
(Token.Literal.String.Single, ' '),
(Token.Literal.String.Interpol, '{'),
(Token.Literal.Number.Integer, '4'),
(Token.Operator, '*'),
(Token.Literal.Number.Integer, '10'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ' '),
(Token.Literal.String.Escape, '}}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'{{{4*10}}}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Escape, '{{'),
(Token.Literal.String.Interpol, '{'),
(Token.Literal.Number.Integer, '4'),
(Token.Operator, '*'),
(Token.Literal.Number.Integer, '10'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Escape, '}}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"fr'x={4*10}'\n",
[
(Token.Literal.String.Affix, 'fr'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, "x="),
(Token.Literal.String.Interpol, '{'),
(Token.Literal.Number.Integer, '4'),
(Token.Operator, '*'),
(Token.Literal.Number.Integer, '10'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"""f'abc {a["x"]} def'\n""",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'abc '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'a'),
(Token.Punctuation, '['),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Double, 'x'),
(Token.Literal.String.Double, '"'),
(Token.Punctuation, ']'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ' def'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'''abc {a['x']} def'''\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'''"),
(Token.Literal.String.Single, 'abc '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'a'),
(Token.Punctuation, '['),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'x'),
(Token.Literal.String.Single, "'"),
(Token.Punctuation, ']'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ' def'),
(Token.Literal.String.Single, "'''"),
(Token.Text, '\n')
]
), (
"""f'''{x
+1}'''\n""",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'''"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'x'),
(Token.Text, '\n'),
(Token.Operator, '+'),
(Token.Literal.Number.Integer, '1'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'''"),
(Token.Text, '\n')
]
), (
"""f'''{d[0
]}'''\n""",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'''"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'd'),
(Token.Punctuation, '['),
(Token.Literal.Number.Integer, '0'),
(Token.Text, '\n'),
(Token.Punctuation, ']'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'''"),
(Token.Text, '\n')
]
), (
"f'result: {value:{width}.{precision}}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'result: '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'value'),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'width'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '.'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'precision'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"'a' 'b' f'{x}' '{c}' f'str<{y:^4}>' 'd' 'e'\n",
[
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'a'),
(Token.Literal.String.Single, "'"),
(Token.Text, ' '),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'b'),
(Token.Literal.String.Single, "'"),
(Token.Text, ' '),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'x'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, ' '),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{c}'),
(Token.Literal.String.Single, "'"),
(Token.Text, ' '),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'str<'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'y'),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Single, '^4'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '>'),
(Token.Literal.String.Single, "'"),
(Token.Text, ' '),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'd'),
(Token.Literal.String.Single, "'"),
(Token.Text, ' '),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'e'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'{i}:{d[i]}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'i'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ':'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'd'),
(Token.Punctuation, '['),
(Token.Name, 'i'),
(Token.Punctuation, ']'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'x = {x:+3}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, "x = "),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'x'),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Single, '+3'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'{fn(lst,2)} {fn(lst,3)}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'fn'),
(Token.Punctuation, '('),
(Token.Name, 'lst'),
(Token.Punctuation, ','),
(Token.Literal.Number.Integer, '2'),
(Token.Punctuation, ')'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ' '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'fn'),
(Token.Punctuation, '('),
(Token.Name, 'lst'),
(Token.Punctuation, ','),
(Token.Literal.Number.Integer, '3'),
(Token.Punctuation, ')'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'mapping is { {a:b for (a, b) in ((1, 2), (3, 4))} }'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'mapping is '),
(Token.Literal.String.Interpol, '{'),
(Token.Text, ' '),
(Token.Punctuation, '{'),
(Token.Name, 'a'),
(Token.Punctuation, ':'),
(Token.Name, 'b'),
(Token.Text, ' '),
(Token.Keyword, 'for'),
(Token.Text, ' '),
(Token.Punctuation, '('),
(Token.Name, 'a'),
(Token.Punctuation, ','),
(Token.Text, ' '),
(Token.Name, 'b'),
(Token.Punctuation, ')'),
(Token.Text, ' '),
(Token.Operator.Word, 'in'),
(Token.Text, ' '),
(Token.Punctuation, '('),
(Token.Punctuation, '('),
(Token.Literal.Number.Integer, '1'),
(Token.Punctuation, ','),
(Token.Text, ' '),
(Token.Literal.Number.Integer, '2'),
(Token.Punctuation, ')'),
(Token.Punctuation, ','),
(Token.Text, ' '),
(Token.Punctuation, '('),
(Token.Literal.Number.Integer, '3'),
(Token.Punctuation, ','),
(Token.Text, ' '),
(Token.Literal.Number.Integer, '4'),
(Token.Punctuation, ')'),
(Token.Punctuation, ')'),
(Token.Punctuation, '}'),
(Token.Text, ' '),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"""f'a={d["a"]}'\n""",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'a='),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'd'),
(Token.Punctuation, '['),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Double, 'a'),
(Token.Literal.String.Double, '"'),
(Token.Punctuation, ']'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'a={d[a]}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, 'a='),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'd'),
(Token.Punctuation, '['),
(Token.Name, 'a'),
(Token.Punctuation, ']'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"fr'{header}:\\s+'\n",
[
(Token.Literal.String.Affix, 'fr'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'header'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ':'),
(Token.Literal.String.Single, '\\'),
(Token.Literal.String.Single, 's+'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'{a!r}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'a'),
(Token.Literal.String.Interpol, '!r}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'{(lambda x: x*2)(3)}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Punctuation, '('),
(Token.Keyword, 'lambda'),
(Token.Text, ' '),
(Token.Name, 'x'),
(Token.Punctuation, ':'),
(Token.Text, ' '),
(Token.Name, 'x'),
(Token.Operator, '*'),
(Token.Literal.Number.Integer, '2'),
(Token.Punctuation, ')'),
(Token.Punctuation, '('),
(Token.Literal.Number.Integer, '3'),
(Token.Punctuation, ')'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"extra = f'{extra},waiters:{len(self._waiters)}'\n",
[
(Token.Name, 'extra'),
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'extra'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, ',waiters:'),
(Token.Literal.String.Interpol, '{'),
(Token.Name.Builtin, 'len'),
(Token.Punctuation, '('),
(Token.Name.Builtin.Pseudo, 'self'),
(Token.Operator, '.'),
(Token.Name, '_waiters'),
(Token.Punctuation, ')'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
'message.append(f" [line {lineno:2d}]")\n',
[
(Token.Name, 'message'),
(Token.Operator, '.'),
(Token.Name, 'append'),
(Token.Punctuation, '('),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Double, ' [line '),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'lineno'),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Double, '2d'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Double, ']'),
(Token.Literal.String.Double, '"'),
(Token.Punctuation, ')'),
(Token.Text, '\n')
]
),
# Examples from https://bugs.python.org/issue36817
(
'f"{foo=}"\n',
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'foo'),
(Token.Literal.String.Interpol, '=}'),
(Token.Literal.String.Double, '"'),
(Token.Text, '\n')
]
), (
"f'{foo=!s}'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'foo'),
(Token.Literal.String.Interpol, '=!s}'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
'f"{math.pi=!f:.2f}"\n',
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'math'),
(Token.Operator, '.'),
(Token.Name, 'pi'),
(Token.Literal.String.Interpol, '=!f:'),
(Token.Literal.String.Double, '.2f'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Double, '"'),
(Token.Text, '\n')
]
), (
'f"{ chr(65) =}"\n',
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '{'),
(Token.Text, ' '),
(Token.Name.Builtin, 'chr'),
(Token.Punctuation, '('),
(Token.Literal.Number.Integer, '65'),
(Token.Punctuation, ')'),
(Token.Text, ' '),
(Token.Literal.String.Interpol, '=}'),
(Token.Literal.String.Double, '"'),
(Token.Text, '\n')
]
), (
'f"{chr(65) = }"\n',
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Interpol, '{'),
(Token.Name.Builtin, 'chr'),
(Token.Punctuation, '('),
(Token.Literal.Number.Integer, '65'),
(Token.Punctuation, ')'),
(Token.Text, ' '),
(Token.Literal.String.Interpol, '= }'),
(Token.Literal.String.Double, '"'),
(Token.Text, '\n')
]
), (
"f'*{n=:30}*'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, '*'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'n'),
(Token.Literal.String.Interpol, '=:'),
(Token.Literal.String.Single, '30'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '*'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"f'*{n=!r:30}*'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, '*'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'n'),
(Token.Literal.String.Interpol, '=!r:'),
(Token.Literal.String.Single, '30'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '*'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"""f"*{f'{n=}':30}*"\n""",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Double, '"'),
(Token.Literal.String.Double, '*'),
(Token.Literal.String.Interpol, '{'),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'n'),
(Token.Literal.String.Interpol, '=}'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Interpol, ':'),
(Token.Literal.String.Double, '30'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Double, '*'),
(Token.Literal.String.Double, '"'),
(Token.Text, '\n')
]
), (
"f'*{n=:+<30}*'\n",
[
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'"),
(Token.Literal.String.Single, '*'),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'n'),
(Token.Literal.String.Interpol, '=:'),
(Token.Literal.String.Single, '+<30'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '*'),
(Token.Literal.String.Single, "'"),
(Token.Text, '\n')
]
), (
"""
f'''{foo
= !s:20}'''\n""",
[
(Token.Text, ' '),
(Token.Literal.String.Affix, 'f'),
(Token.Literal.String.Single, "'''"),
(Token.Literal.String.Interpol, '{'),
(Token.Name, 'foo'),
(Token.Text, '\n '),
(Token.Literal.String.Interpol, '= !s:'),
(Token.Literal.String.Single, '20'),
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, "'''"),
(Token.Text, '\n')
]
)
)
for fragment,tokens in fragments_and_tokens:
assert list(lexer3.get_tokens(fragment)) == tokens
# Now switch between single and double quotes, to cover both cases equally
rep = {"'":'"', '"':"'"}
pattern = re.compile("|".join(rep.keys()))
for fragment,tokens in fragments_and_tokens:
fragment = pattern.sub(lambda m: rep[m.group(0)], fragment)
tokens = list(tokens)
for i,(token,match) in enumerate(tokens):
if token == Token.Literal.String.Single:
token = Token.Literal.String.Double
elif token == Token.Literal.String.Double:
token = Token.Literal.String.Single
match = pattern.sub(lambda m: rep[m.group(0)], match)
tokens[i] = (token, match)
assert list(lexer3.get_tokens(fragment)) == tokens
| bsd-2-clause | 6,313,952,533,965,913,000 | 37.712589 | 114 | 0.421708 | false | 4.008362 | true | false | false |
bayazee/flask-mosession | flask_mosession/__init__.py | 1 | 9217 | # -*- coding: utf-8 -*-
"""
flask_mosession
~~~~~~~~~~~~~~~~~~
Alternative for Flask session module that uses MongoDB as main storage
:copyright: (c) 2013 by Bayazee & Rokooie.
:license: BSD, see LICENSE for more details.
"""
from bson import Binary
from uuid import uuid4
from flask import current_app
from flask.sessions import SessionInterface, SessionMixin
from pymongo.errors import ConnectionFailure
from werkzeug.datastructures import CallbackDict
from cache_backends import NoCacheBackend
__revision__ = '$Revision: e1a7ef4049fb $'
class MoSession(CallbackDict, SessionMixin):
    """
    Session replacement class.

    The session object will be an instance of this class or one of its
    children; importing ``flask.session`` yields an object of this class.
    MoSession saves data only when necessary: empty sessions are not saved.
    """
    def __init__(self, initial=None):
        # Any mutation of the dict flips ``modified`` so save_session()
        # knows the session must be persisted.
        def _on_update(d):
            d.modified = True
        CallbackDict.__init__(self, initial, _on_update)
        if initial:
            # Loaded from storage: nothing to persist yet.
            self.modified = False
        else:
            # Brand-new session: give it an id and mark it for saving.
            self.generate_sid()
            self.new = True
            self.modified = True
    def generate_sid(self):
        """
        Generate session id using UUID4 and store it in the object's _id attribute.
        :return: (Binary) Session id
        """
        self['_id'] = Binary(str(uuid4()))
        return self['_id']
    def remove_stored_session(self):
        # Drop the persisted copy from both MongoDB and the cache backend.
        current_app.extensions['mosession'].storage.collection.remove({'_id': self['_id']})
        current_app.extensions['mosession'].cache.remove(str(self['_id']))
    def destroy(self):
        """Destroys a session completely, by deleting all keys and removing it
        from the internal store immediately.
        This allows removing a session for security reasons, e.g. a login
        stored in a session will cease to exist if the session is destroyed.
        """
        self.remove_stored_session()
        self.clear()
        self.new = True
        self.generate_sid()
    def regenerate(self):
        """Generate a new session id for this session.
        To avoid vulnerabilities through `session fixation attacks
        <http://en.wikipedia.org/wiki/Session_fixation>`_, this function can be
        called after an action like a login has taken place. The session will
        be copied over to a new session id and the old one removed.
        """
        self.remove_stored_session()
        self.new = True
        self.generate_sid()
    @property
    def sid(self):
        """
        Return session id.
        Session id is stored in database as its _id field.
        :return: Session id
        """
        return str(self['_id'])
    def __setattr__(self, *args, **kwargs):
        # Delegates attribute writes straight to SessionMixin (explicit
        # target instead of the normal MRO lookup).
        return SessionMixin.__setattr__(self, *args, **kwargs)
class MoSessionInterface(SessionInterface):
    """
    MoSession interface class, flask session interface is replaced with this.
    MoSession Interface helps developer to overload or change operation functionality of flask central session manager.
    """
    session_class = MoSession
    @property
    def _mosession(self):
        """
        Returns current app's MoSession extension instance.
        """
        return current_app.extensions['mosession']
    def load_session(self, sid):
        """
        Load session from cache or database, If found in database but not in cache, saves it in cache too.
        :param sid: Session ID
        :return: An instance of type session_class with session data or None if session not found
        """
        if not sid:
            return None
        # Cache first; fall back to MongoDB and warm the cache on a hit.
        stored_session = self._mosession.cache.get(sid)
        if not stored_session:
            stored_session = self._mosession.storage.collection.find_one({'_id': Binary(sid)})
            if stored_session:
                self._mosession.cache.set(sid, stored_session)
        return self.session_class(stored_session) if stored_session else None
    def open_session(self, app, request):
        """
        Overrides open_session interface.
        Tries to load session, in case of failure creates a new instance of session_class type.
        :param app: Current app's instance (required to load SESSION_COOKIE_NAME field from config)
        :param request: Current request
        :return: Session object
        """
        return self.load_session(str(request.cookies.get(app.config['SESSION_COOKIE_NAME'], ''))) or self.session_class()
    def raw_save_session(self, session):
        """
        Save session in database and also in cache.
        :param session: Session object
        :return:
        """
        dict_session = dict(session)
        self._mosession.storage.collection.save(dict_session)
        self._mosession.cache.set(session.sid, dict_session)
    def save_session(self, app, session, response):
        """
        Overrides save_session interface.
        Save session data if it's modified, it cares about session expiration and other features.
        operation of function :
        step 1:if modified flag of session is true then function go to step 2 else function do nothing
        step 2:function calculate expire time and session permanent then if new flags of session and expire are true then change
        session expire property to expire time
        step 3:now if new flag of session is true set session sid (session id) and change flag to false.set sid and current cookie
        data in cookies
        step 4:set current session (new created) to current cache
        step 5:set modified flag os session to false
        :param app: Current app's instance (required to load SESSION_COOKIE_NAME field from config)
        :param session: Session object
        :param response: Response object
        """
        # Unmodified sessions are never written: this is the "save only when
        # necessary" behaviour promised by MoSession.
        if not session.modified:
            return
        session.permanent = not app.config['SESSION_EXPIRE_AT_BROWSER_CLOSE']
        expiration = self.get_expiration_time(app, session)
        if session.new and expiration:
            # TODO: Is this line really necessary?
            session['expire'] = expiration
        self.raw_save_session(session)
        if session.new:
            # First save: hand the session id to the client via cookie.
            session.new = False
            response.set_cookie(
                key=app.config['SESSION_COOKIE_NAME'],
                value=session.sid,
                domain=self.get_cookie_domain(app),
                expires=expiration, httponly=self.get_cookie_httponly(app)
            )
        session.modified = False
class SessionStorage(object):
    """
    The class role is to serve the storage, So it's a wrapper on pymongo's database class to add auto reconnect.
    :param app: Current Application Object
    """
    def __init__(self, host, port, database_name, collection_name):
        self.host = host
        self.port = port
        self.database_name = database_name
        self.collection_name = collection_name
        self._collection = None  # connected lazily on first access
    @property
    def collection(self):
        # Lazy connection: the first access triggers connect().
        if not self._collection:
            self.connect()
        return self._collection
    def connect(self):
        """
        Try to connect to mongodb and set the sessions collection reference.
        It will try 5 times to connect to database - with 100ms delay between tries -
        """
        # NOTE(review): this sets self._collection (not a ``database``
        # attribute) to the <database>.<collection> handle.
        if self._collection:
            return
        from pymongo.connection import Connection
        from pymongo.errors import AutoReconnect
        for _connection_attempts in range(5):
            try:
                self._collection = Connection(self.host, self.port)[self.database_name][self.collection_name]
            except AutoReconnect:
                from time import sleep
                sleep(0.1)
            else:
                break
        else:
            # All five attempts failed.
            raise ConnectionFailure
class MoSessionExtension(object):
    """
    MoSession extension object.

    Registers the MongoDB-backed session machinery (storage, cache backend
    and the session interface) on a Flask application.
    """
    def __init__(self, app=None):
        self.app = None
        self.session_class = None
        self.storage = None
        self._collection = None
        if app:
            self.init_app(app)
    def init_app(self, app):
        """
        Register flask-mosession with Flask's app instance.
        :param app: Flask's app instance
        """
        # Bug fix: the module level only does ``from cache_backends import
        # NoCacheBackend``, so the bare name ``cache_backends`` used by the
        # getattr() lookup below was unbound and raised NameError. Import the
        # module itself so any backend named in MOSESSION_CACHE_BACKEND can
        # be resolved.
        import cache_backends
        app.extensions['mosession'] = self
        app.config.setdefault('MONGODB_SESSIONS_COLLECTION_NAME', 'sessions')
        app.config.setdefault('SESSION_EXPIRE_AT_BROWSER_CLOSE', True)
        app.config.setdefault('MOSESSION_CACHE_BACKEND', 'NoCacheBackend')
        self.cache = getattr(cache_backends, app.config['MOSESSION_CACHE_BACKEND'])(app)
        self.storage = SessionStorage(
            app.config['MONGODB_HOST'],
            app.config['MONGODB_PORT'],
            app.config['MONGODB_DATABASE'],
            app.config['MONGODB_SESSIONS_COLLECTION_NAME'],
        )
        app.session_interface = MoSessionInterface()
        if self.session_class:
            # Allow users to substitute their own session class.
            app.session_interface.session_class = self.session_class
    def cleanup_sessions(self):
        # TODO: expired session documents should be deleted with a mongodb
        # query (translated from the original note).
        pass
| bsd-3-clause | 4,458,105,034,843,652,600 | 31.114983 | 130 | 0.626777 | false | 4.452657 | true | false | false |
fquinto/NoConName2015_IoT | source_code/mosquitto_iot.py | 1 | 4956 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author Version Date Comments
# FQuinto 1.0.0 2015-Nov First version fron NoConName 2015 event
# Do test
# Copyright (C) 2015 Fran Quinto
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import RPi.GPIO as GPIO
import time
import os
import sys
import paho.mqtt.client as mqtt
import Adafruit_DHT
# Sensor should be set to Adafruit_DHT.DHT11,
# Adafruit_DHT.DHT22, or Adafruit_DHT.AM2302.
sensor = Adafruit_DHT.DHT22
# Pin assignments. GPIO numbers in the comments are for reference only;
# GPIO.setmode(GPIO.BOARD) below means the values are physical pin numbers.
temphum = 2 # DHT data line: GPIO2, pin 3
GPIO.setwarnings(False) # Turn off warnings
GPIO.setmode(GPIO.BOARD) # use physical BOARD pin numbering
red = 12 # RGB LED red pin, GPIO 18
green = 16 # RGB LED green pin, GPIO 23
blue = 18 # RGB LED blue pin, GPIO 24
pir = 32 # PIR motion sensor pin, GPIO 12
boton = 7 # physical push-button pin, GPIO 4
rel1_sirena = 35 # Relay 1 (left board): siren, GPIO 19
rel2_giro = 36 # Relay 2 (left board): rotating beacon, GPIO 16
rel3_luz_sirena = 37 # Relay 1 (right board): siren light, GPIO 26
rel4 = 38 # Relay 2 (right board): unused elsewhere in this script, GPIO 20
# setup all the pins
GPIO.setup(red, GPIO.OUT)
GPIO.setup(green, GPIO.OUT)
GPIO.setup(blue, GPIO.OUT)
GPIO.setup(pir, GPIO.IN)
GPIO.setup(boton, GPIO.IN)
GPIO.setup(rel1_sirena, GPIO.OUT)
GPIO.setup(rel2_giro, GPIO.OUT)
GPIO.setup(rel3_luz_sirena, GPIO.OUT)
GPIO.setup(rel4, GPIO.OUT)
wait = 0.1  # main-loop polling period in seconds
# INIT: LEDs off, relays released (relays are active-low: writing 1 = OFF,
# as shown by the ON branches in on_message() writing 0)
GPIO.output(red, 0) #Turn OFF LED
GPIO.output(green, 0) #Turn OFF LED
GPIO.output(blue, 0) #Turn OFF LED
GPIO.output(rel1_sirena, 1) #Turn OFF
GPIO.output(rel2_giro, 1) #Turn OFF
GPIO.output(rel3_luz_sirena, 1) #Turn OFF
GPIO.output(rel4, 1) #Turn OFF
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
    """MQTT CONNACK callback: log the result code and subscribe to commands."""
    banner = "Connected with result code " + str(rc)
    print(banner)
    # Subscribing inside the connect callback (rather than once at startup)
    # means the subscription is renewed automatically after every reconnect.
    client.subscribe("orden")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    # MQTT message dispatcher: commands arrive on the 'orden' topic as plain
    # payload strings; sensor readings are published back, relays are switched
    # directly (0 = energize/ON, 1 = release/OFF).
    print(msg.topic+"\nMensaje: "+str(msg.payload))
    if (msg.topic == 'orden'):
        if (msg.payload == 'temperatura'):
            # Read the DHT22 and publish 'temperatura:<value>' (or ':0' on failure).
            humidity, temperature = Adafruit_DHT.read_retry(sensor, temphum)
            if temperature is not None:
                message = 'temperatura:{0:0.1f}'.format(temperature)
            else:
                message = 'temperatura:0'
            client.publish("temperatura", message)
        if (msg.payload == 'humedad'):
            # Same sensor read, but publish the humidity component.
            humidity, temperature = Adafruit_DHT.read_retry(sensor, temphum)
            if humidity is not None:
                message = 'humedad:{0:0.1f}'.format(humidity)
            else:
                message = 'humedad:0'
            client.publish("humedad", message)
        if (msg.payload == 'giroON'):
            GPIO.output(rel2_giro, 0) #Turn ON
        if (msg.payload == 'luzON'):
            GPIO.output(rel3_luz_sirena, 0) #Turn ON
        if (msg.payload == 'sirenaON'):
            GPIO.output(rel1_sirena, 0) #Turn ON
        if (msg.payload == 'giroOFF'):
            GPIO.output(rel2_giro, 1) #Turn OFF
        if (msg.payload == 'luzOFF'):
            GPIO.output(rel3_luz_sirena, 1) #Turn OFF
        if (msg.payload == 'sirenaOFF'):
            GPIO.output(rel1_sirena, 1) #Turn OFF
        if (msg.payload == 'dispara'):
            # Play a gunshot sound in the background via mpg321.
            os.system('mpg321 -g 100 -q mob_ua-gun_shoot_m_16.mp3 &')
try:
    client = mqtt.Client()
    client.on_connect = on_connect
    client.on_message = on_message
    client.connect("localhost", 1883, 60)
    wait = 0.1
    # Edge-detection flags: a message is published only on the first poll
    # after the input changes, not on every loop iteration.
    envia_mensaje_boton = True
    # NOTE(review): starts False, so the very first PIR trigger is ignored
    # until the PIR has read low once — presumably to avoid a boot-time
    # false alarm; confirm.
    envia_mensaje_PIR = False
    # Blocking call that processes network traffic, dispatches callbacks and
    # handles reconnecting.
    # Other loop*() functions are available that give a threaded interface and a
    # manual interface.
    #client.loop_forever()
    while True:
        client.loop()
        # Button is active-low: input False means pressed.
        if ((GPIO.input(boton) == False) and (envia_mensaje_boton)):
            envia_mensaje_boton = False
            client.publish("boton", "ON")
        elif (GPIO.input(boton) == True):
            envia_mensaje_boton = True
        # PIR is active-high: input True means motion detected.
        if ((GPIO.input(pir) == True) and (envia_mensaje_PIR)):
            envia_mensaje_PIR = False
            client.publish("PIR", "ON")
        elif (GPIO.input(pir) == False):
            envia_mensaje_PIR = True
        time.sleep(wait)
except KeyboardInterrupt:
    pass
# Shutdown path: everything off and GPIO released.
GPIO.output(red, 0) #Turn OFF LED
GPIO.output(green, 0) #Turn OFF LED
GPIO.output(blue, 0) #Turn OFF LED
GPIO.output(rel1_sirena, 1) #Turn OFF
GPIO.output(rel2_giro, 1) #Turn OFF
GPIO.output(rel3_luz_sirena, 1) #Turn OFF
GPIO.output(rel4, 1) #Turn OFF
#Tidy up and remaining connections.
GPIO.cleanup()
teracyhq/flask-classy | test_classful_py3/test_type_hints.py | 2 | 1623 | from uuid import UUID
from flask import Flask
from flask_classful import FlaskView, route
from nose.tools import eq_
# python3 only
class TypingView(FlaskView):
    """View whose handlers carry type annotations and URL-converter args."""
    def index(self):
        return "Index"
    @route('/<id>', methods=['POST'])
    def post(self, id: str) -> str:
        return "Post"
    def patch(self, id: str) -> str:
        return "Patch"
    def int(self, arg: int):
        # int/float/uuid handlers echo their argument back; the annotations
        # presumably select the matching URL converter — see test_url_converter.
        return str(arg)
    def float(self, arg: float):
        return str(arg)
    def uuid(self, arg: UUID):
        return str(arg)
# Module-level app and test client shared by every test below.
app = Flask('typing-app')
TypingView.register(app)
client = app.test_client()
def test_index():
    """A trailing-slash GET returns the body; without the slash we get a 308."""
    ok_response = client.get('/typing/')
    eq_(b"Index", ok_response.data)
    redirect_response = client.get('/typing')
    eq_(redirect_response.status_code, 308)
def test_post():
    """POST matches only the no-trailing-slash route; the other form is 405."""
    ok_response = client.post('/typing/123')
    eq_(b"Post", ok_response.data)
    rejected_response = client.post('/typing/123/')
    eq_(rejected_response.status_code, 405)
def test_patch():
    """PATCH matches the trailing-slash route; the bare form redirects (308)."""
    ok_response = client.patch('/typing/123/')
    eq_(b"Patch", ok_response.data)
    redirect_response = client.patch('/typing/123')
    eq_(redirect_response.status_code, 308)
def test_url_converter():
    """Converters reject non-matching values (404) and echo matching ones."""
    cases = (
        ('int', 'asdfsdf', '1'),
        ('float', 'sdfad', '1.1'),
        ('uuid', '10', '1f5018ba-1a86-4f7f-a6c5-596674562f36'),
    )
    template = '/typing/{}/{}/'
    for converter, bad_value, good_value in cases:
        # A value the converter cannot parse must never reach the endpoint.
        eq_(client.get(template.format(converter, bad_value)).status_code, 404)
        good_response = client.get(template.format(converter, good_value))
        eq_(good_response.status_code, 200)
        eq_(bytes(good_value, 'utf-8'), good_response.data)
| bsd-3-clause | 1,053,648,999,224,750,200 | 22.867647 | 71 | 0.588417 | false | 3.182353 | false | false | false |
b1-systems/kiwi | test/unit/test_helper.py | 2 | 1543 | from functools import wraps
import kiwi.logger
import sys
import logging
from io import BytesIO
from mock import MagicMock, patch
# default log level, overwrite when needed
kiwi.logger.log.setLevel(logging.WARN)
# default commandline used for any test, overwrite when needed
sys.argv = [
    sys.argv[0], 'system', 'prepare',
    '--description', 'description', '--root', 'directory'
]
argv_kiwi_tests = sys.argv
# mock open calls: the builtin module is named __builtin__ on python 2 and
# builtins on python 3, hence the version switch below.
patch_open = patch("{0}.open".format(
    sys.version_info.major < 3 and "__builtin__" or "builtins")
)
class raises(object):
    """
    Decorator asserting that the wrapped callable raises one of the given
    exception types (exception decorator as used in nose, tools/nontrivial.py).
    """
    def __init__(self, *exceptions):
        self.exceptions = exceptions
        # Human-readable list of acceptable exception names for the message.
        self.valid = ' or '.join(e.__name__ for e in exceptions)
    def __call__(self, func):
        expected_name = func.__name__
        @wraps(func)
        def wrapper(*args, **kw):
            try:
                func(*args, **kw)
            except self.exceptions:
                # One of the expected exceptions fired: success.
                return
            except Exception:
                # Anything else propagates untouched.
                raise
            # No exception at all: that is a test failure.
            message = "%s() did not raise %s" % (expected_name, self.valid)
            raise AssertionError(message)
        return wrapper
def mock_open(data=None):
    """Build a MagicMock standing in for the builtin ``open``.

    :param data: optional bytes readable from the returned handle
    :return: a callable mock whose context manager yields a BytesIO buffer
    """
    buffer = BytesIO(data)
    handle = MagicMock()
    handle.write.return_value = None
    # BytesIO instances are always truthy, so the ``or handle`` fallback is
    # effectively dead; kept to mirror the original behaviour exactly.
    handle.__enter__.return_value = buffer or handle
    opener = MagicMock()
    opener.return_value = handle
    return opener
| gpl-3.0 | -6,462,874,671,621,616,000 | 23.109375 | 70 | 0.593001 | false | 3.976804 | false | false | false |
mcheo/ansible_f5 | library/iworkflow_local_connector.py | 4 | 7897 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module metadata: 'preview' status, community supported.
ANSIBLE_METADATA = {
    'status': ['preview'],
    'supported_by': 'community',
    'metadata_version': '1.0'
}
DOCUMENTATION = '''
module: iworkflow_bigip_connector
short_description: Manipulate cloud BIG-IP connectors in iWorkflow.
description:
- Manipulate cloud BIG-IP connectors in iWorkflow.
version_added: 2.4
options:
name:
description:
- Name of the connector to create.
required: True
state:
description:
- When C(present), ensures that the cloud connector exists. When
C(absent), ensures that the cloud connector does not exist.
required: false
default: present
choices:
- present
- absent
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
extends_documentation_fragment: f5
requirements:
- f5-sdk >= 2.3.0
- iWorkflow >= 2.1.0
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Create cloud connector named Private Cloud
iworkflow_bigip_connector:
name: "Private Cloud"
password: "secret"
server: "iwf.mydomain.com"
user: "admin"
delegate_to: localhost
'''
RETURN = '''
'''
from ansible.module_utils.f5_utils import (
AnsibleF5Client,
AnsibleF5Parameters,
F5ModuleError,
HAS_F5SDK,
iControlUnexpectedHTTPError
)
class Parameters(AnsibleF5Parameters):
    """Maps module options to and from the iWorkflow REST representation."""
    returnables = ['name']
    api_attributes = ['description']
    updatables = ['description']
    def to_return(self):
        """Collect the returnable attributes, filtered of unset values."""
        gathered = {key: getattr(self, key) for key in self.returnables}
        return self._filter_params(gathered)
    def api_params(self):
        """Collect the API attributes, honoring any api_map renames."""
        gathered = {}
        for attribute in self.api_attributes:
            if self.api_map is not None and attribute in self.api_map:
                source = self.api_map[attribute]
            else:
                source = attribute
            gathered[attribute] = getattr(self, source)
        return self._filter_params(gathered)
class ArgumentSpec(object):
    """Describes the options this Ansible module accepts."""
    def __init__(self):
        self.supports_check_mode = True
        # 'name' is mandatory; 'state' toggles between create and delete.
        self.argument_spec = {
            'name': {'required': True},
            'state': {
                'required': False,
                'default': 'present',
                'choices': ['absent', 'present'],
            },
        }
        self.f5_product_name = 'iworkflow'
class ModuleManager(object):
    """Implements the module's create/update/delete logic against iWorkflow."""
    def __init__(self, client):
        self.client = client
        self.have = None  # state read back from the device
        self.want = Parameters(self.client.module.params)  # desired state
        self.changes = Parameters()  # accumulated differences to report
    def _set_changed_options(self):
        # Record all non-None desired values as changes (used on create).
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Parameters(changed)
    def _update_changed_options(self):
        # Diff desired against current values; True when anything differs.
        changed = {}
        for key in Parameters.updatables:
            if getattr(self.want, key) is not None:
                attr1 = getattr(self.want, key)
                attr2 = getattr(self.have, key)
                if attr1 != attr2:
                    changed[key] = attr1
        if changed:
            self.changes = Parameters(changed)
            return True
        return False
    def exec_module(self):
        """Dispatch on the requested state and return the result dict."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state == "present":
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            # Wrap transport-level errors in the module's error type.
            raise F5ModuleError(str(e))
        result.update(**self.changes.to_return())
        result.update(dict(changed=changed))
        return result
    def exists(self):
        """Checks to see if a connector exists.
        This method does not use ODATA queries because that functionality
        is broken in iWorkflow. Therefore, we iterate over all connectors
        until we find the one we're interested in.
        :return:
        """
        collection = self.client.api.cm.cloud.connectors.locals.get_collection()
        for item in collection:
            if item.displayName != "BIG-IP":
                continue
            if item.name != self.want.name:
                continue
            return True
        return False
    def present(self):
        # Create when missing, otherwise reconcile differences.
        if self.exists():
            return self.update()
        else:
            return self.create()
    def create(self):
        self._set_changed_options()
        if self.client.check_mode:
            # Check mode: report the would-be change, touch nothing.
            return True
        self.create_on_device()
        return True
    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.client.check_mode:
            return True
        self.update_on_device()
        return True
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def update_on_device(self):
        # No updatable attributes are pushed to the device yet.
        pass
    def read_current_from_device(self):
        # Return a Parameters built from the matching device connector,
        # or None when no BIG-IP connector with the wanted name exists.
        connector = None
        collection = self.client.api.cm.cloud.connectors.locals.get_collection()
        for item in collection:
            if item.displayName != "BIG-IP":
                continue
            if item.name != self.want.name:
                continue
            connector = item
            break
        if not connector:
            return None
        result = connector.attrs
        return Parameters(result)
    def create_on_device(self):
        params = self.want.api_params()
        self.client.api.cm.cloud.connectors.locals.local.create(
            name=self.want.name,
            **params
        )
    def absent(self):
        if self.exists():
            return self.remove()
        return False
    def remove(self):
        if self.client.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            # Device still reports the connector: deletion failed.
            raise F5ModuleError("Failed to delete the BIG-IP connector")
        return True
    def remove_from_device(self):
        resource = None
        collection = self.client.api.cm.cloud.connectors.locals.get_collection()
        for item in collection:
            if item.displayName != "BIG-IP":
                continue
            if item.name != self.want.name:
                continue
            resource = item
            break
        if resource:
            resource.delete()
def main():
    """Module entry point: build the F5 client, run the manager, report."""
    if not HAS_F5SDK:
        raise F5ModuleError("The python f5-sdk module is required")
    argument_spec = ArgumentSpec()
    f5_client = AnsibleF5Client(
        argument_spec=argument_spec.argument_spec,
        supports_check_mode=argument_spec.supports_check_mode,
        f5_product_name=argument_spec.f5_product_name
    )
    try:
        manager = ModuleManager(f5_client)
        f5_client.module.exit_json(**manager.exec_module())
    except F5ModuleError as e:
        f5_client.module.fail_json(msg=str(e))
if __name__ == '__main__':
    main()
| apache-2.0 | -4,420,233,054,871,470,600 | 27.003546 | 82 | 0.593263 | false | 4.196068 | false | false | false |
grow/grow-ext-build-server | grow_build_server/emailer.py | 1 | 1438 | from google.appengine.ext import vendor
vendor.add('extensions')
from google.appengine.api import mail
import jinja2
import os
import premailer
_appid = os.getenv('APPLICATION_ID').replace('s~', '')
EMAIL_SENDER = 'noreply@{}.appspotmail.com'.format(_appid)
class Emailer(object):
    """Renders a Jinja2 template, inlines its CSS, and sends it via App Engine mail."""
    def __init__(self, sender=None):
        # Fall back to the application's default noreply address.
        self.sender = sender or EMAIL_SENDER
    def send(self, to, subject, template_path, kwargs=None):
        """Render ``template_path`` with ``kwargs`` and email the result to ``to``."""
        html = self._render(template_path, kwargs=kwargs)
        self._send(subject, to, html)
    def _render(self, template_path, kwargs=None):
        # Render the template, then inline the CSS (premailer) so styling
        # survives mail clients that strip <style> blocks.
        params = {}
        if kwargs:
            params.update(kwargs)
        template = self.env.get_template(template_path)
        html = template.render(params)
        return premailer.transform(html)
    def _send(self, subject, to, html):
        # Thin wrapper over the App Engine mail API.
        message = mail.EmailMessage(sender=self.sender, subject=subject)
        message.to = to
        message.html = html
        message.send()
    @property
    def env(self):
        # Rebuilt on every access; templates live next to this module.
        path = os.path.join(os.path.dirname(__file__), 'templates')
        loader = jinja2.FileSystemLoader([path])
        extensions = [
            'jinja2.ext.autoescape',
            'jinja2.ext.do',
            'jinja2.ext.loopcontrols',
            'jinja2.ext.with_',
        ]
        return jinja2.Environment(
            loader=loader, extensions=extensions, autoescape=True,
            trim_blocks=True)
| mit | -8,182,801,995,076,380,000 | 28.346939 | 72 | 0.614743 | false | 3.814324 | false | false | false |
abe-winter/pg13-py | pg13/table.py | 1 | 6026 | "table -- Table class"
import collections
from . import pg, threevl, sqparse2, sqex
# errors
class PgExecError(sqparse2.PgMockError):
    "base class for errors during table execution"
class BadFieldName(PgExecError):
    "raised when a referenced column name does not exist on the table"
    pass
class IntegrityError(PgExecError):
    "integrity / constraint violation error"
    pass # careful: pgmock_dbapi also defines this
# Sentinel type: distinguishes a column that was not supplied at all from an
# explicitly passed-in null.
class Missing: "for distinguishing missing columns vs passed-in null"
def expand_row(table_fields, fields, values):
    "helper for insert. turn (field_names, values) into the full-width, properly-ordered row"
    names = [column.name for column in table_fields]
    # value_at[table_position] -> index into `values`, for the supplied columns.
    value_at = {}
    for value_index, field_name in enumerate(fields):
        value_at[names.index(field_name)] = value_index  # ValueError on unknown column
    return [values[value_at[pos]] if pos in value_at else Missing
            for pos in range(len(table_fields))]
def emergency_cast(colx, value):
    """ugly: this is a huge hack. get serious about where this belongs in the architecture.
    For now, most types rely on being fed in as SubbedLiteral.
    """
    if colx.coltp.type.lower() != 'boolean':
        return value  # todo: type check?
    if isinstance(value, sqparse2.NameX):
        value = value.name
    if isinstance(value, bool):
        return value
    # Raises KeyError for anything other than 'true'/'false' (any case).
    return {'true': True, 'false': False}[value.lower()]
def field_default(colx, table_name, tables_dict):
    "takes sqparse2.ColX, Table"
    column_type = colx.coltp.type.lower()
    if column_type == 'serial':
        # Serial columns auto-increment: next value is max(column)+1, or 0 on
        # an empty table (coalesce of the max with -1).
        query = 'select coalesce(max(%s),-1)+1 from %s' % (colx.name, table_name)
        return sqex.run_select(sqparse2.parse(query), tables_dict, Table)[0]
    if colx.not_null:
        raise NotImplementedError('todo: not_null error')
    return toliteral(colx.default)
# (index, ColX) pair returned by Table.lookup().
FieldLookup = collections.namedtuple('FieldLookup', 'index type')
def toliteral(probably_literal):
    # Normalize a parser node to a plain python value: the 'null' name maps to
    # None; nodes exposing .toliteral() are converted; everything else passes
    # through unchanged.
    # todo: among the exception cases are Missing, str. go through cases and make this cleaner. the test suite alone has multiple types here.
    if probably_literal == sqparse2.NameX('null'):
        return None
    return probably_literal.toliteral() if hasattr(probably_literal, 'toliteral') else probably_literal
class Table:
    """In-memory mock of a SQL table: schema, row storage, and DML operations."""
    def __init__(self, name, fields, pkey):
        "fields is a list of sqparse2.ColX"
        self.name, self.fields, self.pkey = name, fields, (pkey or [])
        self.rows = []
        self.child_tables = [] # tables that inherit from this one
        self.parent_table = None # table this inherits from
    def get_column(self, name):
        # Return the ColX with the given name, or raise KeyError.
        col = next((f for f in self.fields if f.name == name), None)
        if col is None:
            raise KeyError(name)
        return col
    def pkey_get(self, row):
        # Return the stored row whose primary-key values match `row`, or None.
        if len(self.pkey) > 0:
            indexes = [i for i, f in enumerate(self.fields) if f.name in self.pkey]
            if len(indexes) != len(self.pkey):
                raise ValueError('bad pkey')
            pkey_vals = list(map(row.__getitem__, indexes))
            return next((r for r in self.rows if pkey_vals == list(map(r.__getitem__, indexes))), None)
        else:
            # warning: is this right? it's saying that if not given, the pkey is the whole row. test dupe inserts on a real DB.
            return row if row in self.rows else None
    def fix_rowtypes(self, row):
        # Width check plus normalization of parser literals to python values.
        if len(row) != len(self.fields):
            raise ValueError
        return list(map(toliteral, row))
    def apply_defaults(self, row, tables_dict):
        "apply defaults to missing cols for a row that's being inserted"
        return [
            emergency_cast(colx, field_default(colx, self.name, tables_dict) if v is Missing else v)
            for colx, v in zip(self.fields, row)
        ]
    def insert(self, fields, values, returning, tables_dict):
        nix = sqex.NameIndexer.ctor_name(self.name)
        nix.resolve_aonly(tables_dict, Table)
        # Expand a partial column list to full width, then fill defaults.
        expanded_row = self.fix_rowtypes(expand_row(self.fields, fields, values) if fields else values)
        row = self.apply_defaults(expanded_row, tables_dict)
        # todo: check ColX.not_null here. figure out what to do about null pkey field
        for i, elt in enumerate(row):
            # todo: think about dependency model if one field relies on another. (what do I mean? 'insert into t1 (a,b) values (10,a+5)'? is that valid?)
            row[i] = sqex.Evaluator(row, nix, tables_dict).eval(elt)
        if self.pkey_get(row):
            raise pg.DupeInsert(row)
        self.rows.append(row)
        if returning:
            return sqex.Evaluator((row,), nix, tables_dict).eval(returning)
        return None
    def match(self, where, tables, nix):
        # Rows passing the where clause under three-valued logic; all rows
        # when where is falsy.
        return [r for r in self.rows if not where or threevl.ThreeVL.test(sqex.Evaluator((r,), nix, tables).eval(where))]
    def lookup(self, name):
        # Resolve a column name (or NameX) to a FieldLookup(index, ColX).
        if isinstance(name, sqparse2.NameX):
            name = name.name # this is horrible; be consistent
        try:
            return FieldLookup(*next((i, f) for i, f in enumerate(self.fields) if f.name == name))
        except StopIteration:
            # todo: confirm that next() still raises StopIteration on py3
            raise BadFieldName(name)
    def update(self, setx, where, returning, tables_dict):
        nix = sqex.NameIndexer.ctor_name(self.name)
        nix.resolve_aonly(tables_dict, Table)
        if not all(isinstance(x, sqparse2.AssignX) for x in setx):
            raise TypeError('not_xassign', list(map(type, setx)))
        match_rows = self.match(where, tables_dict, nix) if where else self.rows
        for row in match_rows:
            for expr in setx:
                # Rows are mutated in place, one assignment at a time.
                row[self.lookup(expr.col).index] = sqex.Evaluator((row,), nix, tables_dict).eval(expr.expr)
        if returning:
            # todo: write a test for the empty case, make sure this doesn't crash. Should I set row to None at the top or is it not that simple?
            # pylint: disable=undefined-loop-variable
            return sqex.Evaluator((row,), nix, tables_dict).eval(returning)
        return None
    def delete(self, where, tables_dict):
        # todo: what's the deal with nested selects in delete. does it get evaluated once to a scalar before running the delete?
        # todo: this will crash with empty where clause
        nix = sqex.NameIndexer.ctor_name(self.name)
        nix.resolve_aonly(tables_dict, Table)
        # todo: why 'not' below?
        self.rows = [r for r in self.rows if not sqex.Evaluator((r,), nix, tables_dict).eval(where)]
| mit | -8,536,767,672,996,889,000 | 42.352518 | 147 | 0.688848 | false | 3.42581 | false | false | false |
Jozhogg/iris | lib/iris/tests/unit/merge/test__CubeSignature.py | 1 | 4270 | # (C) British Crown Copyright 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the `iris._merge._CubeSignature` class."""
from __future__ import (absolute_import, division, print_function)
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import mock
import numpy as np
import iris.exceptions
from iris._merge import _CubeSignature as CubeSig
class Test_match__fill_value(tests.IrisTest):
    """Tests for _CubeSignature.match() fill-value comparison behaviour."""
    def setUp(self):
        # A cube definition built from unique sentinels, so only the
        # fill_value differs between the signatures under test.
        self.defn = mock.Mock(standard_name=mock.sentinel.standard_name,
                              long_name=mock.sentinel.long_name,
                              var_name=mock.sentinel.var_name,
                              units=mock.sentinel.units,
                              attributes=mock.sentinel.attributes,
                              cell_methods=mock.sentinel.cell_methods)
        self.data_shape = mock.sentinel.data_shape
        self.data_type = mock.sentinel.data_type
    def test_non_nan_fill_value_equal(self):
        sig1 = CubeSig(self.defn, self.data_shape, self.data_type, 10)
        sig2 = CubeSig(self.defn, self.data_shape, self.data_type, 10)
        self.assertTrue(sig1.match(sig2, True))
        self.assertTrue(sig1.match(sig2, False))
        self.assertTrue(sig2.match(sig1, True))
        self.assertTrue(sig2.match(sig1, False))
    def test_non_nan_fill_value_unequal(self):
        # On mismatch: match(..., True) raises MergeError, match(..., False)
        # returns False.
        sig1 = CubeSig(self.defn, self.data_shape, self.data_type, 10)
        sig2 = CubeSig(self.defn, self.data_shape, self.data_type, 20)
        with self.assertRaises(iris.exceptions.MergeError):
            sig1.match(sig2, True)
        self.assertFalse(sig1.match(sig2, False))
        with self.assertRaises(iris.exceptions.MergeError):
            sig2.match(sig1, True)
        self.assertFalse(sig2.match(sig1, False))
    def test_nan_fill_value_equal(self):
        # NaN fill values must be treated as equal even though NaN != NaN.
        sig1 = CubeSig(self.defn, self.data_shape, self.data_type, np.nan)
        sig2 = CubeSig(self.defn, self.data_shape, self.data_type, np.nan)
        self.assertTrue(sig1.match(sig2, True))
        self.assertTrue(sig1.match(sig2, False))
        self.assertTrue(sig2.match(sig1, True))
        self.assertTrue(sig2.match(sig1, False))
    def test_nan_fill_value_unequal(self):
        sig1 = CubeSig(self.defn, self.data_shape, self.data_type, np.nan)
        sig2 = CubeSig(self.defn, self.data_shape, self.data_type, 10)
        with self.assertRaises(iris.exceptions.MergeError):
            sig1.match(sig2, True)
        self.assertFalse(sig1.match(sig2, False))
        with self.assertRaises(iris.exceptions.MergeError):
            sig2.match(sig1, True)
        self.assertFalse(sig2.match(sig1, False))
    def test_str_fill_value_equal(self):
        sig1 = CubeSig(self.defn, self.data_shape, self.data_type, ' ')
        sig2 = CubeSig(self.defn, self.data_shape, self.data_type, ' ')
        self.assertTrue(sig1.match(sig2, True))
        self.assertTrue(sig1.match(sig2, False))
        self.assertTrue(sig2.match(sig1, True))
        self.assertTrue(sig2.match(sig1, False))
    def test_str_fill_value_unequal(self):
        sig1 = CubeSig(self.defn, self.data_shape, self.data_type, ' ')
        sig2 = CubeSig(self.defn, self.data_shape, self.data_type, '_')
        with self.assertRaises(iris.exceptions.MergeError):
            sig1.match(sig2, True)
        self.assertFalse(sig1.match(sig2, False))
        with self.assertRaises(iris.exceptions.MergeError):
            sig2.match(sig1, True)
        self.assertFalse(sig2.match(sig1, False))
if __name__ == '__main__':
    tests.main()
| lgpl-3.0 | -3,409,901,609,769,241,000 | 42.131313 | 74 | 0.660422 | false | 3.440774 | true | false | false |
mostateresnet/exdbproject | exdb/urls.py | 1 | 2285 | """exdb URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/dev/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url
from django.contrib.auth.views import login, logout_then_login
from exdb import views
urlpatterns = [
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^create$', views.CreateExperienceView.as_view(), name='create_experience'),
url(r'^approval/(?P<pk>\d+)$', views.ExperienceApprovalView.as_view(), name='approval'),
url(r'^conclusion/(?P<pk>\d+)$', views.ExperienceConclusionView.as_view(), name='conclusion'),
url(r'^view/(?P<pk>\d+)$', views.ViewExperienceView.as_view(), name='view_experience'),
url(r'^edit/(?P<pk>\d+)$', views.EditExperienceView.as_view(), name='edit'),
url(r'^login$', login, name='login', kwargs={'template_name': 'exdb/login.html'}),
url(r'^logout$', logout_then_login, name='logout'),
url(r'^list/upcoming$', views.ListExperienceByStatusView.as_view(readable_status="Upcoming"), name="upcoming_list"),
url(r'^list/needs-evaluation$', views.ListExperienceByStatusView.as_view(readable_status="Needs Evaluation"), name="eval_list"),
url(r'^list/(?P<status>[a-zA-Z\-]+)$', views.ListExperienceByStatusView.as_view(), name='status_list'),
url(r'^experience/search/$', views.SearchExperienceResultsView.as_view(), name='search'),
url(r'^experience/search/report$', views.SearchExperienceReport.as_view(), name='search_report'),
url(r'^complete/(?P<pk>\d+)?$', views.CompletionBoardView.as_view(), name='completion_board'),
url(r'^requirement/view/(?P<pk>\d+)$', views.ViewRequirementView.as_view(), name='view_requirement'),
url(r'^section/complete/(?P<pk>\d+)?$', views.SectionCompletionBoardView.as_view(), name='section_completion_board'),
]
| mit | -2,978,080,121,025,919,000 | 60.756757 | 132 | 0.691028 | false | 3.297258 | false | false | false |
JhoanLT/Mascotas | refugio/refugio/urls.py | 1 | 2138 | """refugio URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth.views import login, logout_then_login, password_reset, password_reset_done, password_reset_confirm, password_reset_complete
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^mascota/', include('apps.mascota.urls', namespace="mascota")),
url(r'^adopcion/', include('apps.adopcion.urls', namespace="adopcion")),
url(r'^usuario/', include('apps.usuarios.urls', namespace="usuario")),
url(r'^accounts/login/', login, {'template_name': 'index.html'}, name='login'),
url(r'^logout/', logout_then_login, name='logout'),
url(r'^reset/password_reset', password_reset, {'template_name': 'registration/password_reset_form.html',
'email_template_name': 'registration/password_reset_email.html'}, name='password_reset'),
url(r'^reset/password_reset_done', password_reset_done, {'template_name': 'registration/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$', password_reset_confirm, {'template_name': 'registration/password_reset_confirm.html'},
name='password_reset_confirm'),
url(r'^reset/done', password_reset_complete, {'template_name': 'registration/password_reset_complete.html'},
name='password_reset_complete'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| gpl-3.0 | -4,769,158,905,202,402,000 | 56.783784 | 149 | 0.703929 | false | 3.465154 | false | false | false |
benbenbenbenbenbenbenbenben/mitsingen | mitsingen/settings.py | 1 | 3574 | """
Django settings for mitsingen project.
Generated by 'django-admin startproject' using Django 1.9.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", '')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get("DJANGO_DEBUG", "False") == "True"
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'sing.apps.SingConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mitsingen.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mitsingen.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'static/'
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
MEDIA_URL = "/media/"
EMAIL_HOST = os.environ.get("DJANGO_EMAIL_HOST", '')
EMAIL_PORT = 465
EMAIL_HOST_USER = os.environ.get("DJANGO_EMAIL_HOST_USER", '')
EMAIL_HOST_PASSWORD = os.environ.get("DJANGO_EMAIL_HOST_PASSWORD", '')
EMAIL_USE_SSL = True
LOGIN_URL = "/sing"
| agpl-3.0 | 8,544,258,707,033,894,000 | 25.671642 | 91 | 0.688025 | false | 3.490234 | false | false | false |
liqd/a4-meinberlin | meinberlin/apps/dashboard/__init__.py | 1 | 2358 | from adhocracy4.dashboard import ProjectDashboard
from adhocracy4.dashboard import components
default_app_config = 'meinberlin.apps.dashboard.apps.Config'
class TypedProjectDashboard(ProjectDashboard):
def __init__(self, project):
if project.project_type == 'meinberlin_bplan.Bplan':
project = project.externalproject.bplan
elif (project.project_type
== 'meinberlin_extprojects.ExternalProject'):
project = project.externalproject
elif (project.project_type
== 'meinberlin_projectcontainers.ProjectContainer'):
project = project.projectcontainer
super().__init__(project)
def get_project_components(self):
if self.project.project_type == 'meinberlin_bplan.Bplan':
return [components.projects.get('bplan'),
components.projects.get('plans'),
components.projects.get('adminlog')]
elif (self.project.project_type
== 'meinberlin_extprojects.ExternalProject'):
return [components.projects.get('external'),
components.projects.get('topics'),
components.projects.get('point'),
components.projects.get('plans'),
components.projects.get('adminlog')]
elif (self.project.project_type
== 'meinberlin_projectcontainers.ProjectContainer'):
return [components.projects.get('container-basic'),
components.projects.get('container-information'),
components.projects.get('topics'),
components.projects.get('point'),
components.projects.get('plans'),
components.projects.get('container-projects')]
return [component for component in components.get_project_components()
if component.is_effective(self.project)]
def get_module_components(self):
if self.project.project_type == 'meinberlin_bplan.Bplan':
return []
elif (self.project.project_type
== 'meinberlin_extprojects.ExternalProject'):
return []
elif (self.project.project_type
== 'meinberlin_projectcontainers.ProjectContainer'):
return []
return components.get_module_components()
| agpl-3.0 | -3,359,733,828,596,905,000 | 43.490566 | 78 | 0.611535 | false | 4.632613 | false | false | false |
thejeshgn/dondeestas | main/settings.py | 1 | 5012 | """
Django settings for dondeestas project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://ukqvlsqxejpebv:BthNM1aQN2DNd8ysGvG2N2JK6j@ec2-174-129-197-200.compute-1.amazonaws.com:5432/dflgcpfmok4avu')}
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_PATH = os.path.dirname(__file__)
PROJECT_ROOT = os.path.abspath(os.path.join(PROJECT_PATH, os.pardir))
import wsgi
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'admin_tools',
'admin_tools.theming',
'admin_tools.menu',
'admin_tools.dashboard',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
'south',
'rest_framework',
'rest_framework.authtoken',
'api',
'misc',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages"
)
#ADMIN_TOOLS_INDEX_DASHBOARD = 'dashboard.CustomIndexDashboard'
#ADMIN_TOOLS_APP_INDEX_DASHBOARD = 'dashboard.CustomAppIndexDashboard'
#ADMIN_TOOLS_THEMING_CSS = 'css/admin_theming.css'
TRACK_AJAX_REQUESTS=True
TRACK_PAGEVIEWS=True
DEBUG_TOOLBAR_PATCH_SETTINGS = False
ROOT_URLCONF = 'main.urls'
WSGI_APPLICATION = 'main.wsgi.application'
DATE_INPUT_FORMATS=(
'%d-%m-%Y', '%d/%m/%Y', '%d/%m/%y', # '25-10-2006', '25/10/2006', '25/10/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
DATETIME_INPUT_FORMATS=(
'%d-%m-%Y %H:%M:%S', # '2006-10-25 14:30:59'
'%d-%m-%Y %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%d-%m-%Y %H:%M', # '2006-10-25 14:30'
'%d-%m-%Y', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '10/25/2006 14:30'
'%d/%m/%Y', # '10/25/2006'
'%d/%m/%y %H:%M:%S', # '10/25/06 14:30:59'
'%d/%m/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%d/%m/%y %H:%M', # '10/25/06 14:30'
'%d/%m/%y', # '10/25/06'
)
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
#USE_I18N = True
#USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(PROJECT_ROOT, "static")
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, "main","static"),
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
CRISPY_FAIL_SILENTLY = not DEBUG
TEMPLATE_DIRS = (
os.path.join(PROJECT_PATH, 'templates'),
)
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.AllowAny',),
'PAGINATE_BY': 10,
'DEFAULT_AUTHENTICATION_CLASSES': (
)
}
try:
from production_settings import *
except ImportError:
#If production settings are present don't import local settings
try:
from local_settings import *
except ImportError:
print "couldnt import local_settings"
pass
| gpl-3.0 | 2,988,904,299,523,692,500 | 26.091892 | 175 | 0.651636 | false | 2.924154 | false | false | false |
johnnovak/twyg | doc/figures/nodes-rect-roundingStyle.py | 1 | 1345 | import os, sys
from fig import *
config1 = r"""
[layout]
style layout
[node]
style rect
fontName $FONTNAME
fontSize $FONTSIZE
textBaselineCorrection $BASELINE_CORR
strokeWidth 3
roundingStyle screen
textPadX 22
textPadY 8
[connection]
style curve
[color]
style cycle
colorscheme "mint-examples3"
fontColorAuto no
fontColor #fff
"""
config2 = r"""
[layout]
style layout
[node]
style rect
fontName $FONTNAME
fontSize $FONTSIZE
textBaselineCorrection $BASELINE_CORR
strokeWidth 3
roundingStyle arc
cornerRadius 25
textPadX 22
textPadY 8
[connection]
style curve
[color]
style cycle
colorscheme "mint-examples"
fontColorAuto no
fontColor #fff
"""
data1 = { 'toothsome': [] }
data2 = { 'flittermice': [] }
scale = 0.8
trees = [
create_tree(config1, data1),
create_tree(config2, data2)
]
write_all_trees(trees, scale)
| mit | 1,346,015,149,236,001,500 | 19.074627 | 44 | 0.463197 | false | 4.22956 | false | true | false |
Dangetsu/vnr | Frameworks/Sakura/py/apps/reader/qml/qmlrc.py | 1 | 7710 | # coding: utf8
# rc.py
# 10/8/2012 jichi
# Runtime resource locations
#
# All paths are using ascii encoding.
# Assume there are no unicode characters in the relative path.
from PySide.QtCore import Qt, QUrl
from PySide.QtGui import QImage, QPixmap, QIcon
from Qt5.QtQuick1 import QDeclarativeImageProvider
from sakurakit.skdebug import derror
import rc
## Resource image provider ##
# See: http://www.lothlorien.com/kf6gpe/?p=234
class ResourceImageProvider(QDeclarativeImageProvider):
PROVIDER_ID = 'rc'
def __init__(self, type=QDeclarativeImageProvider.Pixmap):
"""
@param type int QDeclarativeImageProvider.ImageType either Pixmap or Image
Use QPixmap as default, which renders faster than QImage
"""
super(ResourceImageProvider, self).__init__(type)
def requestImage(self, name, rsize, size):
"""@reimp @public
@param[in] providerId unicode unused
@param[out] rsize QSize
@param[in] size QSize
@return QImage not None
virtual QImage requestImage(const QString &id, QSize *size, const QSize &requestedSize)
"""
ret = QImage(rc.image_path(name))
if ret.isNull():
derror("failed to load image: '%s'" % name)
elif ret.size() != size:
ret = (ret.scaled(size, Qt.KeepAspectRatio, Qt.SmoothTransformation) if not size.isEmpty() else
ret.scaledToWidth(size.width(), Qt.SmoothTransformation) if size.width() > 0 else
ret.scaledToHeight(size.height(), Qt.SmoothTransformation) if size.height() > 0 else
ret)
rsize.setWidth(ret.width())
rsize.setHeight(ret.height())
return ret
def requestPixmap(self, name, rsize, size):
"""@reimp @public
@param[in] providerId unicode unused
@param[out] rsize QSize
@param[in] size QSize
@return QPixmap not None
virtual QPixmap requestPixmap(const QString &id, QSize *size, const QSize &requestedSize)
"""
ret = QPixmap(rc.image_path(name))
if ret.isNull():
derror("failed to load image: '%s'" % name)
elif ret.size() != size:
ret = (ret.scaled(size, Qt.KeepAspectRatio, Qt.SmoothTransformation) if not size.isEmpty() else
ret.scaledToWidth(size.width(), Qt.SmoothTransformation) if size.width() > 0 else
ret.scaledToHeight(size.height(), Qt.SmoothTransformation) if size.height() > 0 else
ret)
rsize.setWidth(ret.width())
rsize.setHeight(ret.height())
return ret
## File image provider ##
# See: http://www.lothlorien.com/kf6gpe/?p=234
class FileImageProvider(QDeclarativeImageProvider):
PROVIDER_ID = 'file'
"""
Default icon size on Windows
See: http://msdn.microsoft.com/en-us/library/ms997636.aspx
"""
ICON_SIZE = 48, 48
def __init__(self):
"""
Use QPixmap as default, which renders faster than QImage
"""
super(FileImageProvider, self).__init__(QDeclarativeImageProvider.Pixmap)
def requestPixmap(self, path, rsize, size):
"""@reimp @public
@param[in] providerId unicode unused
@param[out] rsize QSize
@param[in] size QSize
@return QPixmap not None
virtual QPixmap requestPixmap(const QString &id, QSize *size, const QSize &requestedSize)
"""
icon = rc.file_icon(path)
if icon.isNull():
derror("failed to load image: '%s'" % path)
ret = QPixmap()
elif not size.isEmpty():
ret = icon.pixmap(size)
else:
#sizes = icon.availableSizes(QIcon.Selected, QIcon.Off) # crash for executable
ret = icon.pixmap(*FileImageProvider.ICON_SIZE)
rsize.setWidth(ret.width())
rsize.setHeight(ret.height())
return ret
## URL image provider ##
class SpringImageProvider(QDeclarativeImageProvider):
PROVIDER_ID = 'spring'
def __init__(self):
"""
Use QPixmap as default, which renders faster than QImage
"""
super(SpringImageProvider, self).__init__(QDeclarativeImageProvider.Pixmap)
def requestPixmap(self, path, rsize, size):
"""@reimp @public
@param[in] providerId unicode unused
@param[out] rsize QSize
@param[in] size QSize
@return QPixmap not None
virtual QPixmap requestPixmap(const QString &id, QSize *size, const QSize &requestedSize)
"""
ret = QPixmap(QUrl(path).toLocalFile())
if ret.isNull():
derror("failed to load image: '%s'" % path)
elif size != ret.size() and not size.isEmpty() and not ret.size().isEmpty():
if ret.width() * size.height() > ret.height() * size.width():
ret = ret.scaledToHeight(min(800, size.height()), Qt.SmoothTransformation)
else:
w = 1000 if ret.width() > ret.height() else 600
ret = ret.scaledToWidth(min(w, size.width()), Qt.SmoothTransformation)
#elif size != ret.size():
# ret = (ret.scaled(size, Qt.KeepAspectRatio, Qt.SmoothTransformation) if not size.isEmpty() else
# ret.scaledToWidth(size.width(), Qt.SmoothTransformation) if size.width() > 0 else
# ret.scaledToHeight(size.height(), Qt.SmoothTransformation) if size.height() > 0 else
# ret)
rsize.setWidth(ret.width())
rsize.setHeight(ret.height())
return ret
# EOF
## URL image provider ##
#class UrlImageProvider(QDeclarativeImageProvider):
#
# PROVIDER_ID = 'url'
#
# def __init__(self):
# """
# Use QPixmap as default, which renders faster than QImage
# """
# super(UrlImageProvider, self).__init__(QDeclarativeImageProvider.Pixmap)
#
# def requestPixmap(self, path, rsize, size):
# """@reimp @public
# @param[in] providerId unicode unused
# @param[out] rsize QSize
# @param[in] size QSize
# @return QPixmap not None
#
# virtual QPixmap requestPixmap(const QString &id, QSize *size, const QSize &requestedSize)
# """
#
# ret = QPixmap(QUrl(path).toLocalFile())
# if ret.isNull():
# derror("failed to load image: '%s'" % path)
# elif size != ret.size():
# ret = (ret.scaled(size, Qt.KeepAspectRatio, Qt.SmoothTransformation) if not size.isEmpty() else
# ret.scaledToWidth(size.width(), Qt.SmoothTransformation) if size.width() > 0 else
# ret.scaledToHeight(size.height(), Qt.SmoothTransformation) if size.height() > 0 else
# ret)
# rsize.setWidth(ret.width())
# rsize.setHeight(ret.height())
# return ret
## Filter image provider ##
#from qimp import qimp
#class FilterImageProvider(QDeclarativeImageProvider):
#
# PROVIDER_ID = 'filter'
#
# def __init__(self):
# """
# Use QPixmap as default, which renders faster than QImage
# """
# super(FilterImageProvider, self).__init__(QDeclarativeImageProvider.Pixmap)
#
# def requestPixmap(self, path, rsize, size):
# """@reimp @public
# @param[in] providerId unicode unused
# @param[out] rsize QSize
# @param[in] size QSize
# @return QPixmap not None
#
# virtual QPixmap requestPixmap(const QString &id, QSize *size, const QSize &requestedSize)
# """
# ret = QPixmap(QUrl(path).toLocalFile())
# if ret.isNull():
# derror("failed to load image: '%s'" % path)
# #elif size != ret.size():
# elif size.width() < ret.width() or size.height() < ret.height(): # do increase size
# ret = (ret.scaled(size, Qt.KeepAspectRatio, Qt.SmoothTransformation) if not size.isEmpty() else
# ret.scaledToWidth(size.width(), Qt.SmoothTransformation) if size.width() > 0 and size.width() < ret.width() else
# ret.scaledToHeight(size.height(), Qt.SmoothTransformation) if size.height() > 0 and size.height() < ret.height() else
# ret)
# rsize.setWidth(ret.width())
# rsize.setHeight(ret.height())
# if ret and not ret.isNull():
# qimp.gradientpixmap(ret)
# return ret
| gpl-3.0 | 9,057,902,787,735,632,000 | 33.115044 | 131 | 0.657847 | false | 3.265565 | false | false | false |
museumsvictoria/nodel-recipes | Advantech ADAM 6050 6060 relay module/legacy/pymodbus/server/async.py | 2 | 8820 | '''
Implementation of a Twisted Modbus Server
------------------------------------------
'''
from binascii import b2a_hex
from twisted.internet import protocol
from twisted.internet.protocol import ServerFactory
from pymodbus.constants import Defaults
from pymodbus.factory import ServerDecoder
from pymodbus.datastore import ModbusServerContext
from pymodbus.device import ModbusControlBlock
from pymodbus.device import ModbusAccessControl
from pymodbus.device import ModbusDeviceIdentification
from pymodbus.transaction import ModbusSocketFramer, ModbusAsciiFramer
from pymodbus.interfaces import IModbusFramer
from pymodbus.exceptions import *
from pymodbus.pdu import ModbusExceptions as merror
from pymodbus.internal.ptwisted import InstallManagementConsole
#---------------------------------------------------------------------------#
# Logging
#---------------------------------------------------------------------------#
import logging
_logger = logging.getLogger(__name__)
#---------------------------------------------------------------------------#
# Modbus TCP Server
#---------------------------------------------------------------------------#
class ModbusTcpProtocol(protocol.Protocol):
''' Implements a modbus server in twisted '''
def connectionMade(self):
''' Callback for when a client connects
Note, since the protocol factory cannot be accessed from the
protocol __init__, the client connection made is essentially our
__init__ method.
'''
_logger.debug("Client Connected [%s]" % self.transport.getHost())
self.framer = self.factory.framer(decoder=self.factory.decoder)
def connectionLost(self, reason):
''' Callback for when a client disconnects
:param reason: The client's reason for disconnecting
'''
_logger.debug("Client Disconnected: %s" % reason)
def dataReceived(self, data):
''' Callback when we receive any data
:param data: The data sent by the client
'''
_logger.debug(" ".join([hex(ord(x)) for x in data]))
if not self.factory.control.ListenOnly:
self.framer.processIncomingPacket(data, self._execute)
def _execute(self, request):
''' Executes the request and returns the result
:param request: The decoded request message
'''
try:
context = self.factory.store[request.unit_id]
response = request.execute(context)
except Exception, ex:
_logger.debug("Datastore unable to fulfill request %s" % ex)
response = request.doException(merror.SlaveFailure)
#self.framer.populateResult(response)
response.transaction_id = request.transaction_id
response.unit_id = request.unit_id
self._send(response)
def _send(self, message):
''' Send a request (string) to the network
:param message: The unencoded modbus response
'''
self.factory.control.Counter.BusMessage += 1
pdu = self.framer.buildPacket(message)
_logger.debug('send: %s' % b2a_hex(pdu))
return self.transport.write(pdu)
class ModbusServerFactory(ServerFactory):
'''
Builder class for a modbus server
This also holds the server datastore so that it is
persisted between connections
'''
protocol = ModbusTcpProtocol
def __init__(self, store, framer=None, identity=None):
''' Overloaded initializer for the modbus factory
If the identify structure is not passed in, the ModbusControlBlock
uses its own empty structure.
:param store: The ModbusServerContext datastore
:param framer: The framer strategy to use
:param identity: An optional identify structure
'''
self.decoder = ServerDecoder()
self.framer = framer or ModbusSocketFramer
self.store = store or ModbusServerContext()
self.control = ModbusControlBlock()
self.access = ModbusAccessControl()
if isinstance(identity, ModbusDeviceIdentification):
self.control.Identity.update(identity)
#---------------------------------------------------------------------------#
# Modbus UDP Server
#---------------------------------------------------------------------------#
class ModbusUdpProtocol(protocol.DatagramProtocol):
''' Implements a modbus udp server in twisted '''
def __init__(self, store, framer=None, identity=None):
''' Overloaded initializer for the modbus factory
If the identify structure is not passed in, the ModbusControlBlock
uses its own empty structure.
:param store: The ModbusServerContext datastore
:param framer: The framer strategy to use
:param identity: An optional identify structure
'''
framer = framer or ModbusSocketFramer
self.framer = framer(decoder=ServerDecoder())
self.store = store or ModbusServerContext()
self.control = ModbusControlBlock()
self.access = ModbusAccessControl()
if isinstance(identity, ModbusDeviceIdentification):
self.control.Identity.update(identity)
def datagramReceived(self, data, addr):
''' Callback when we receive any data
:param data: The data sent by the client
'''
_logger.debug("Client Connected [%s:%s]" % addr)
_logger.debug(" ".join([hex(ord(x)) for x in data]))
if not self.control.ListenOnly:
continuation = lambda request: self._execute(request, addr)
self.framer.processIncomingPacket(data, continuation)
def _execute(self, request, addr):
''' Executes the request and returns the result
:param request: The decoded request message
'''
try:
context = self.store[request.unit_id]
response = request.execute(context)
except Exception, ex:
_logger.debug("Datastore unable to fulfill request %s" % ex)
response = request.doException(merror.SlaveFailure)
#self.framer.populateResult(response)
response.transaction_id = request.transaction_id
response.unit_id = request.unit_id
self._send(response, addr)
def _send(self, message, addr):
''' Send a request (string) to the network
:param message: The unencoded modbus response
:param addr: The (host, port) to send the message to
'''
self.control.Counter.BusMessage += 1
pdu = self.framer.buildPacket(message)
_logger.debug('send: %s' % b2a_hex(pdu))
return self.transport.write(pdu, addr)
#---------------------------------------------------------------------------#
# Starting Factories
#---------------------------------------------------------------------------#
def StartTcpServer(context, identity=None):
''' Helper method to start the Modbus Async TCP server
:param context: The server data context
:param identify: The server identity to use (default empty)
'''
from twisted.internet import reactor
_logger.info("Starting Modbus TCP Server on %s" % Defaults.Port)
framer = ModbusSocketFramer
factory = ModbusServerFactory(context, framer, identity)
InstallManagementConsole({ 'factory' : factory })
reactor.listenTCP(Defaults.Port, factory)
reactor.run()
def StartUdpServer(context, identity=None):
''' Helper method to start the Modbus Async Udp server
:param context: The server data context
:param identify: The server identity to use (default empty)
'''
from twisted.internet import reactor
_logger.info("Starting Modbus UDP Server on %s" % Defaults.Port)
framer = ModbusSocketFramer
server = ModbusUdpProtocol(context, framer, identity)
reactor.listenUDP(Defaults.Port, server)
reactor.run()
def StartSerialServer(context, identity=None, framer=ModbusAsciiFramer, **kwargs):
''' Helper method to start the Modbus Async Serial server
:param context: The server data context
:param identify: The server identity to use (default empty)
:param framer: The framer to use (default ModbusAsciiFramer)
'''
from twisted.internet import reactor
from twisted.internet.serialport import SerialPort
_logger.info("Starting Modbus Serial Server on %s" % kwargs['device'])
factory = ModbusServerFactory(context, framer, identity)
protocol = factory.buildProtocol(None)
handle = SerialPort(protocol, kwargs['device'], reactor, Defaults.Baudrate)
reactor.run()
#---------------------------------------------------------------------------#
# Exported symbols
#---------------------------------------------------------------------------#
__all__ = [
"StartTcpServer", "StartUdpServer", "StartSerialServer",
]
| mit | -5,174,017,304,118,086,000 | 36.854077 | 82 | 0.618141 | false | 4.579439 | false | false | false |
agry/NGECore2 | scripts/mobiles/tatooine/tusken_warmaster.py | 2 | 1337 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('tusken_war_master')
mobileTemplate.setLevel(32)
mobileTemplate.setDifficulty(Difficulty.ELITE)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(True)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("tusken raider")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(True)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_tusken_raider.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('meleehit')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('tusken_warmaster', mobileTemplate)
return | lgpl-3.0 | -598,558,016,224,606,700 | 32.45 | 129 | 0.831713 | false | 3.472727 | false | false | false |
kuntzer/binfind | utils.py | 1 | 2440 | import cPickle as pickle
import csv
import gzip
import numpy as np
import os
def writepickle(obj, filepath, protocol = -1):
"""
I write your python object obj into a pickle file at filepath.
If filepath ends with .gz, I'll use gzip to compress the pickle.
Leave protocol = -1 : I'll use the latest binary protocol of pickle.
"""
if os.path.splitext(filepath)[1] == ".gz":
pkl_file = gzip.open(filepath, 'wb')
else:
pkl_file = open(filepath, 'wb')
pickle.dump(obj, pkl_file, protocol)
pkl_file.close()
def readpickle(filepath):
"""
I read a pickle file and return whatever object it contains.
If the filepath ends with .gz, I'll unzip the pickle file.
"""
if os.path.splitext(filepath)[1] == ".gz":
pkl_file = gzip.open(filepath,'rb')
else:
pkl_file = open(filepath, 'rb')
obj = pickle.load(pkl_file)
pkl_file.close()
return obj
def find_nearest(array,value):
""" Find nearest value is an array """
idx = (np.abs(array-value)).argmin()
return idx
def mkdir(somedir):
"""
A wrapper around os.makedirs.
:param somedir: a name or path to a directory which I should make.
"""
if not os.path.isdir(somedir):
os.makedirs(somedir)
def classify(pred, threshold):
classification = np.zeros_like(pred)
classification[pred >= threshold] = 1
return classification
def load_bmg(fname, main_sequence):
data=[]
with open(fname+'.dat') as observability_file:
observability_data = csv.reader(observability_file, delimiter="\t")
for row in observability_data:
# if line is empty, skip otherwise filter out the blank
dline=row[0].split()
if len(dline)==17 and not dline[6].isdigit():
dline.insert(6, '0')
if dline[0][0]=='#': continue
data.append(np.asarray(dline, dtype=np.float))
data=np.asarray(data)
if main_sequence:
data=data[data[:,2] == 5] #Takes only main sequence stars
return data
def rdisk(radius, norien=25, nrad=35):
orientations = np.linspace(0., np.pi * 2., norien, endpoint=False)
dtheta = (orientations[:2] / 2.)[-1]
nrad = float(nrad)
radii = ( np.arange(nrad) / (nrad - 1) )**2 * float(radius)
coord = []
seen_nought = False
for ir, r in enumerate(radii):
if r == 0 :
if not seen_nought:
coord.append([0., 0.])
seen_nought = True
continue
for orientation in orientations:
x = np.cos(orientation + dtheta * (ir % 2)) * r
y = np.sin(orientation + dtheta * (ir % 2)) * r
coord.append([x, y])
return np.asarray(coord)
| mit | -6,222,757,359,018,498,000 | 25.521739 | 69 | 0.672951 | false | 2.901308 | false | false | false |
UPDDI/mps-database-server | clustergrammer/categories.py | 2 | 11698 | def check_categories(lines):
'''
find out how many row and col categories are available
'''
# count the number of row categories
rcat_line = lines[0].split('\t')
# calc the number of row names and categories
num_rc = 0
found_end = False
# skip first tab
for inst_string in rcat_line[1:]:
if inst_string == '':
if found_end is False:
num_rc = num_rc + 1
else:
found_end = True
max_rcat = 15
if max_rcat > len(lines):
max_rcat = len(lines) - 1
num_cc = 0
for i in range(max_rcat):
ccat_line = lines[i + 1].split('\t')
# make sure that line has length greater than one to prevent false cats from
# trailing new lines at end of matrix
if ccat_line[0] == '' and len(ccat_line) > 1:
num_cc = num_cc + 1
num_labels = {}
num_labels['row'] = num_rc + 1
num_labels['col'] = num_cc + 1
return num_labels
def dict_cat(net, define_cat_colors=False):
'''
make a dictionary of node-category associations
'''
# print('---------------------------------')
# print('---- dict_cat: before setting cat colors')
# print('---------------------------------\n')
# print(define_cat_colors)
# print(net.viz['cat_colors'])
net.persistent_cat = True
for inst_rc in ['row', 'col']:
inst_keys = list(net.dat['node_info'][inst_rc].keys())
all_cats = [x for x in inst_keys if 'cat-' in x]
for inst_name_cat in all_cats:
dict_cat = {}
tmp_cats = net.dat['node_info'][inst_rc][inst_name_cat]
tmp_nodes = net.dat['nodes'][inst_rc]
for i in range(len(tmp_cats)):
inst_cat = tmp_cats[i]
inst_node = tmp_nodes[i]
if inst_cat not in dict_cat:
dict_cat[inst_cat] = []
dict_cat[inst_cat].append(inst_node)
tmp_name = 'dict_' + inst_name_cat.replace('-', '_')
net.dat['node_info'][inst_rc][tmp_name] = dict_cat
# merge with old cat_colors by default
cat_colors = net.viz['cat_colors']
if define_cat_colors == True:
cat_number = 0
for inst_rc in ['row', 'col']:
inst_keys = list(net.dat['node_info'][inst_rc].keys())
all_cats = [x for x in inst_keys if 'cat-' in x]
for cat_index in all_cats:
if cat_index not in cat_colors[inst_rc]:
cat_colors[inst_rc][cat_index] = {}
cat_names = sorted(list(set(net.dat['node_info'][inst_rc][cat_index])))
# loop through each category name and assign a color
for tmp_name in cat_names:
# using the same rules as the front-end to define cat_colors
inst_color = get_cat_color(cat_number + cat_names.index(tmp_name))
check_name = tmp_name
# check if category is string type and non-numeric
try:
float(check_name)
is_string_cat = False
except:
is_string_cat = True
if is_string_cat == True:
# check for default non-color
if ': ' in check_name:
check_name = check_name.split(': ')[1]
# if check_name == 'False' or check_name == 'false':
if 'False' in check_name or 'false' in check_name:
inst_color = '#eee'
if 'Not ' in check_name:
inst_color = '#eee'
# print('cat_colors')
# print('----------')
# print(cat_colors[inst_rc][cat_index])
# do not overwrite old colors
if tmp_name not in cat_colors[inst_rc][cat_index] and is_string_cat:
cat_colors[inst_rc][cat_index][tmp_name] = inst_color
# print('overwrite: ' + tmp_name + ' -> ' + str(inst_color))
cat_number = cat_number + 1
net.viz['cat_colors'] = cat_colors
# print('after setting cat_colors')
# print(net.viz['cat_colors'])
# print('======================================\n\n')
def calc_cat_clust_order(net, inst_rc):
'''
cluster category subset of data
'''
from .__init__ import Network
from copy import deepcopy
from . import calc_clust, run_filter
inst_keys = list(net.dat['node_info'][inst_rc].keys())
all_cats = [x for x in inst_keys if 'cat-' in x]
if len(all_cats) > 0:
for inst_name_cat in all_cats:
tmp_name = 'dict_' + inst_name_cat.replace('-', '_')
dict_cat = net.dat['node_info'][inst_rc][tmp_name]
unordered_cats = dict_cat.keys()
ordered_cats = order_categories(unordered_cats)
# this is the ordering of the columns based on their category, not
# including their clustering ordering within category
all_cat_orders = []
tmp_names_list = []
for inst_cat in ordered_cats:
inst_nodes = dict_cat[inst_cat]
tmp_names_list.extend(inst_nodes)
# cat_net = deepcopy(Network())
# cat_net.dat['mat'] = deepcopy(net.dat['mat'])
# cat_net.dat['nodes'] = deepcopy(net.dat['nodes'])
# cat_df = cat_net.dat_to_df()
# sub_df = {}
# if inst_rc == 'col':
# sub_df['mat'] = cat_df['mat'][inst_nodes]
# elif inst_rc == 'row':
# # need to transpose df
# cat_df['mat'] = cat_df['mat'].transpose()
# sub_df['mat'] = cat_df['mat'][inst_nodes]
# sub_df['mat'] = sub_df['mat'].transpose()
# # filter matrix before clustering
# ###################################
# threshold = 0.0001
# sub_df = run_filter.df_filter_row_sum(sub_df, threshold)
# sub_df = run_filter.df_filter_col_sum(sub_df, threshold)
# # load back to dat
# cat_net.df_to_dat(sub_df)
# cat_mat_shape = cat_net.dat['mat'].shape
# print('***************')
# try:
# if cat_mat_shape[0]>1 and cat_mat_shape[1] > 1 and all_are_numbers == False:
# calc_clust.cluster_row_and_col(cat_net, 'cos')
# inst_cat_order = cat_net.dat['node_info'][inst_rc]['clust']
# else:
# inst_cat_order = list(range(len(cat_net.dat['nodes'][inst_rc])))
# except:
# inst_cat_order = list(range(len(cat_net.dat['nodes'][inst_rc])))
# prev_order_len = len(all_cat_orders)
# # add prev order length to the current order number
# inst_cat_order = [i + prev_order_len for i in inst_cat_order]
# all_cat_orders.extend(inst_cat_order)
# # generate ordered list of row/col names, which will be used to
# # assign the order to specific nodes
# names_clust_list = [x for (y, x) in sorted(zip(all_cat_orders,
# tmp_names_list))]
names_clust_list = tmp_names_list
# calc category-cluster order
final_order = []
for i in range(len(net.dat['nodes'][inst_rc])):
inst_node_name = net.dat['nodes'][inst_rc][i]
inst_node_num = names_clust_list.index(inst_node_name)
final_order.append(inst_node_num)
inst_index_cat = inst_name_cat.replace('-', '_') + '_index'
net.dat['node_info'][inst_rc][inst_index_cat] = final_order
def order_categories(unordered_cats):
'''
If categories are strings, then simple ordering is fine.
If categories are values then I'll need to order based on their values.
The final ordering is given as the original categories (including titles) in a
ordered list.
'''
no_titles = remove_titles(unordered_cats)
all_are_numbers = check_all_numbers(no_titles)
if all_are_numbers:
ordered_cats = order_cats_based_on_values(unordered_cats, no_titles)
else:
ordered_cats = sorted(unordered_cats)
return ordered_cats
def order_cats_based_on_values(unordered_cats, values_list):
import pandas as pd
try:
# convert values_list to values
values_list = [float(i) for i in values_list]
inst_series = pd.Series(data=values_list, index=unordered_cats)
inst_series.sort_values(inplace=True)
ordered_cats = inst_series.index.tolist()
# ordered_cats = unordered_cats
except:
# keep default ordering if error occurs
print('error sorting cats based on values ')
ordered_cats = unordered_cats
return ordered_cats
def check_all_numbers(no_titles):
all_numbers = True
for tmp in no_titles:
if is_number(tmp) == False:
all_numbers = False
return all_numbers
def remove_titles(cats):
from copy import deepcopy
# check if all have titles
###########################
all_have_titles = True
for inst_cat in cats:
if is_number(inst_cat) == False:
if ': ' not in inst_cat:
all_have_titles = False
else:
all_have_titles = False
if all_have_titles:
no_titles = cats
no_titles = [i.split(': ')[1] for i in no_titles]
else:
no_titles = cats
return no_titles
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def get_cat_color(cat_num):
all_colors = [ "#393b79", "#aec7e8", "#ff7f0e", "#ffbb78", "#98df8a", "#bcbd22",
"#404040", "#ff9896", "#c5b0d5", "#8c564b", "#1f77b4", "#5254a3", "#FFDB58",
"#c49c94", "#e377c2", "#7f7f7f", "#2ca02c", "#9467bd", "#dbdb8d", "#17becf",
"#637939", "#6b6ecf", "#9c9ede", "#d62728", "#8ca252", "#8c6d31", "#bd9e39",
"#e7cb94", "#843c39", "#ad494a", "#d6616b", "#7b4173", "#a55194", "#ce6dbd",
"#de9ed6"];
inst_color = all_colors[cat_num % len(all_colors)]
return inst_color
def dendro_cats(net, axis, dendro_level):
if axis == 0:
axis = 'row'
if axis == 1:
axis = 'col'
dendro_level = str(dendro_level)
dendro_level_name = dendro_level
if len(dendro_level) == 1:
dendro_level = '0' + dendro_level
df = net.export_df()
if axis == 'row':
old_names = df.index.tolist()
elif axis == 'col':
old_names = df.columns.tolist()
if 'group' in net.dat['node_info'][axis]:
inst_groups = net.dat['node_info'][axis]['group'][dendro_level]
new_names = []
for i in range(len(old_names)):
inst_name = old_names[i]
group_cat = 'Group '+ str(dendro_level_name) +': cat-' + str(inst_groups[i])
inst_name = inst_name + (group_cat,)
new_names.append(inst_name)
if axis == 'row':
df.index = new_names
elif axis == 'col':
df.columns = new_names
net.load_df(df)
else:
print('please cluster, using make_clust, to define dendrogram groups before running dendro_cats')
def add_cats(net, axis, cat_data):
try:
df = net.export_df()
if axis == 'row':
labels = df.index.tolist()
elif axis == 'col':
labels = df.columns.tolist()
if 'title' in cat_data:
inst_title = cat_data['title']
else:
inst_title = 'New Category'
all_cats = cat_data['cats']
# loop through all labels
new_labels = []
for inst_label in labels:
if type(inst_label) is tuple:
check_name = inst_label[0]
found_tuple = True
else:
check_name = inst_label
found_tuple = False
if ': ' in check_name:
check_name = check_name.split(': ')[1]
# default to False for found cat, overwrite if necessary
found_cat = inst_title + ': False'
# check all categories in cats
for inst_cat in all_cats:
inst_names = all_cats[inst_cat]
if check_name in inst_names:
found_cat = inst_title + ': ' + inst_cat
# add category to label
if found_tuple is True:
new_label = inst_label + (found_cat,)
else:
new_label = (inst_label, found_cat)
new_labels.append(new_label)
# add labels back to DataFrame
if axis == 'row':
df.index = new_labels
elif axis == 'col':
df.columns = new_labels
net.load_df(df)
except:
print('error adding new categories')
| mit | 8,570,142,060,290,531,000 | 26.078704 | 101 | 0.572577 | false | 3.268511 | false | false | false |
autowitch/pypov | scenes/geomorphs/lib/geomorphs/full_5x5_010.py | 1 | 1734 | from pypov.pov import Texture, Pigment, Object, Cylinder, Merge
from pypov.pov import Finish, Box, Cone, Sphere
from pypov.pov import Union, Difference
from pypov.colors import Colors
from lib.base import five_by_five_corner
from lib.textures import cross_hatch_2, wall_texture_1
from lib.metadata import Metadata
from lib.util import float_range
def full_5x5_010_info():
return Metadata("Basic roundish room", "f10",
description="Basic four entrance room",
block_type="full",
bottom=0, top=20,
size="5x5",
repeatable=True,
fully_connected=True,
dead_ends=False,
entrance=False,
has_rooms=True,
passage_type="hewn",
wet=False,
multi_level=False,
keywords=['basic', 'room', 'roundish'])
def full_5x5_010(rotate=(0, 0, 0), translate=(0, 0, 0), detail_level=1,
cross_hatch_texture=cross_hatch_2):
"""docstring for gm02"""
geomorph = Union(
Difference(
Object(five_by_five_corner(), cross_hatch_texture),
Union(
Box((-2.5, 10, 26), ( 2.5, 21, -26)),
Box((26, 10.0001, -2.5), (-26, 21, 2.5)),
Box((-8, 10.00001, -12), (8, 21, 12)),
Box((-12, 10.00002, -8), (12, 21, 8)),
Cylinder((-8, 10.0003, -8), (-8, 21, -8), 4),
Cylinder((8, 10.0004, -8), (8, 21, -8), 4),
Cylinder((-8, 10.0005, 8), (-8, 21, 8), 4),
Cylinder((8, 10.0006, 8), (8, 21, 8), 4),
wall_texture_1
),
),
translate=translate,
rotate=rotate
)
return geomorph
| mit | 8,870,489,732,939,898,000 | 26.09375 | 71 | 0.516724 | false | 3.302857 | false | false | false |
johaness/bifl | bifl/features.py | 2 | 1987 | """
Extract feature batteries from gauss pyramids
"""
import cv
from utils import saveIm
from mods import *
from cpy import *
def stage(lum, sat, rg, by):
lumc = contrast(lum)
lumt = contrast(lumc, 251)
sats = smooth(sat)
satc = contrast(sat)
satt = contrast(satc, 251)
rgc = contrast(rg)
rgt = contrast(rgc, 251)
byc = contrast(by)
byt = contrast(byc, 251)
sob = sobel(lum)
sobs = smooth(sob)
lums = smooth(lum)
rgs = smooth(rg)
bys = smooth(by)
id0, id1, id2 = intdim(lum)
idX = add(zscale(id0), zscale(id2))
return dict(lumc=lumc, lumt=lumt, satc=satc, satt=satt, rgc=rgc, rgt=rgt,
byc=byc, byt=byt, sobs=sobs, lums=lums, id0=id0, id1=id1, id2=id2,
rgs=rgs, sats=sats, bys=bys, idX=idX,)
def noscale(indict):
return indict
def zscaledict(indict):
return dict((n, zscale(m)) for n, m in indict.items())
def histeqdict(indict):
def eq(inmat):
m = zscale(inmat)
return equalize(m)
return dict((n, eq(m)) for n, m in indict.items())
def pyramid(lsrb, count=3, scaler=noscale):
"""
run stage in a downwards pyramid for ``count`` times,
scale each map with ``scaler``,
return list with one dict per pyramid level
"""
features = [scaler(stage(*lsrb))]
if count == 1:
return features
lsrb = list(pyrsdown(*lsrb))
features += pyramid(lsrb, count - 1, scaler)
return features
def base(im, layers):
"""make sure im's dimensions are multiples of 2**layers"""
mod = 2 ** layers
if im.width % mod != 0 or im.height % mod != 0:
im = cv.GetSubRect(im, (
0, 0,
im.width - im.width % mod,
im.height - im.height % mod,))
return cv.GetImage(im)
def extract(image, pyr_levels=3, scaler=zscaledict):
"""extract features from ``image``"""
image = base(image, pyr_levels)
lsrb = colorsplit(image)
return pyramid(lsrb, pyr_levels, scaler=scaler)
| bsd-2-clause | -1,845,461,637,042,928,000 | 24.805195 | 78 | 0.606442 | false | 2.892285 | false | false | false |
linefeedse/korjournal | www/korjournal/viewset/odometerimage.py | 1 | 5931 | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseNotFound
from rest_framework import viewsets, permissions, filters
from rest_framework.decorators import api_view, permission_classes
from korjournal.models import OdometerSnap, OdometerImage
from korjournal.serializers import OdometerSnapSerializer, OdometerImageSerializer
from korjournal.permissions import IsOwner, AnythingGoes, DenyAll, IsDriver
from django.core.exceptions import ObjectDoesNotExist
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Q
from django.http.request import RawPostDataException
from django.db import IntegrityError
from django.utils import timezone
from datetime import timedelta
from dateutil import tz, parser
import cv2
import subprocess
import sys
import os
class OdometerImageViewSet(viewsets.ModelViewSet):
serializer_class = OdometerImageSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,IsOwner,IsDriver)
def runtess(self, imgfile):
ocr = subprocess.run(["/usr/bin/tesseract", imgfile, "stdout", "nobatch", "digits"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, universal_newlines=True).stdout
try:
newodokm = int(ocr.replace(" ",""))
return newodokm
except ValueError:
return 0
def do_ocr(self, imgfile, lim_max, lim_min = 0):
img = cv2.imread(imgfile,0)
height, width = img.shape
x1 = 0
y1 = 0
xleft = int(width * 0.17)
xright = int(width * 0.83)
ybottom = int(height * 0.83)
ytop = int(height * 0.17)
xmiddle1 = int(width*0.07)
xmiddle2 = int(width*0.93)
ymiddle1 = int(height*0.07)
ymiddle2 = int(height*0.93)
x2 = width
y2 = height
crops = [
[y1, ybottom, xleft, x2],
[ymiddle1, ymiddle2, xleft, x2],
[ytop, y2, xleft, x2],
[ytop, y2, x1, xright],
[ymiddle1, ymiddle2, x1, xright],
[y1, ybottom, x1, xright],
[ymiddle1, ymiddle2, xmiddle1, xmiddle2]
]
guesses = [0,0,0,0,0]
bestguess = self.runtess(imgfile)
guesses[2] = bestguess
cropnumber = 0
for crop in crops:
y1 = crop[0]
y2 = crop[1]
x1 = crop[2]
x2 = crop[3]
cropped = img[y1:y2, x1:x2]
filename = "/tmp/ocrthis" + str(os.getpid()) + ".png"
cv2.imwrite(filename, cropped)
guess = self.runtess(filename)
os.unlink(filename)
if guess == 0 or guess in guesses:
continue
if guess < lim_min:
if guesses[1] < guess:
guesses[0] = guesses[1]
guesses[1] = guess
continue
if guess > lim_max:
if guesses[3] > guess:
guesses[4] = guesses[3]
guesses[3] = guess
continue
if guess == bestguess:
if guesses[2] > bestguess:
guesses[4] = guesses[3]
guesses[3] = guesses[2]
guesses[2] = guess
if guesses[2] < bestguess and guesses[2] != 0:
guesses[0] = guesses[1]
guesses[1] = guesses[2]
guesses[2] = guess
continue
if guess > bestguess:
bestguess = guess
if guesses[2] > 0:
guesses[0] = guesses[1]
guesses[1] = guesses[2]
guesses[2] = guess
continue
if guess < bestguess:
if guesses[1] > 0:
guesses[0] = guesses[1]
guesses[1] = guess
if guesses[2] == 0:
if guesses[1] > 0:
guesses[2] = guesses[1]
guesses[1] = guesses[0]
guesses[0] = 0
elif guesses[3] > 0:
guesses[2] = guesses[3]
guesses[3] = guesses[4]
guesses[4] = 0
return guesses
def perform_create(self,serializer):
imgfile = self.request.FILES.get('imagefile')
odoimage = serializer.save(driver=self.request.user, imagefile=imgfile)
lim_min = 0
lim_max = 9999999
# From the last three non-null odometers, pick the second largest odometer,
# this is our MIN
# From the MIN date, calculate reasonable kilometers until today,
# this is our MAX
try:
last_odometers = OdometerSnap.objects.filter(
vehicle=odoimage.odometersnap.vehicle,odometer__gt=0).order_by('-when')[:3]
prev_odometers = OdometerSnap.objects.filter(
vehicle=odoimage.odometersnap.vehicle,when__gt=last_odometers[2].when).order_by('-odometer')[:2]
lim_min = prev_odometers[1].odometer
since_days = timezone.now() - prev_odometers[1].when
max_km_per_day = 300
lim_max = prev_odometers[1].odometer + since_days.days * max_km_per_day + max_km_per_day
except IndexError:
pass
guesses = [0,0,0,0,0]
if (odoimage.odometersnap.odometer < 1):
guesses = self.do_ocr("/vagrant/www/media/" + odoimage.imagefile.name, lim_max, lim_min)
odoimage.odometersnap.odometer = guesses[2]
odoimage.odometersnap.save()
odoimage.guess0 = guesses[0]
odoimage.guess1 = guesses[1]
odoimage.guess2 = guesses[2]
odoimage.guess3 = guesses[3]
odoimage.guess4 = guesses[4]
odoimage.save()
def get_queryset(self):
return OdometerImage.objects.filter(Q(odometersnap__vehicle__owner=self.request.user)|Q(driver=self.request.user))
| gpl-2.0 | -7,199,879,768,204,986,000 | 37.764706 | 175 | 0.567021 | false | 3.540896 | false | false | false |
idlesign/django-sitemessage | sitemessage/toolbox.py | 1 | 11609 | from collections import defaultdict
from datetime import timedelta
from itertools import chain
from operator import itemgetter
from typing import Optional, List, Tuple, Union, Iterable, Any, Callable, Dict, Mapping
from django.conf import settings
from django.conf.urls import re_path
from django.contrib.auth.base_user import AbstractBaseUser
from django.http import HttpRequest
from django.utils import timezone
from django.utils.translation import gettext as _
from .exceptions import UnknownMessengerError, UnknownMessageTypeError
from .messages.base import MessageBase
from .messages.plain import PlainTextMessage
from .models import Message, Dispatch, Subscription
from .views import mark_read, unsubscribe
# NB: Some of these unused imports are exposed as part of toolbox API.
from .messages import register_builtin_message_types # noqa
from .utils import ( # noqa
is_iterable, import_project_sitemessage_modules, get_site_url, recipients,
register_messenger_objects, get_registered_messenger_object, get_registered_messenger_objects,
register_message_types, get_registered_message_type, get_registered_message_types,
get_message_type_for_app, override_message_type_for_app, Recipient
)
_ALIAS_SEP = '|'
_PREF_POST_KEY = 'sm_user_pref'
def schedule_messages(
messages: Union[str, MessageBase, List[Union[str, MessageBase]]],
recipients: Optional[Union[Iterable[Recipient], Recipient]] = None,
sender: Optional[AbstractBaseUser] = None,
priority: Optional[int] = None
) -> List[Tuple[Message, List[Dispatch]]]:
"""Schedules a message or messages.
:param messages: str or MessageBase heir or list - use str to create PlainTextMessage.
:param recipients: recipients addresses or Django User model heir instances
If `None` Dispatches should be created before send using `prepare_dispatches()`.
:param User|None sender: User model heir instance
:param priority: number describing message priority. If set overrides priority provided with message type.
"""
if not is_iterable(messages):
messages = (messages,)
results = []
for message in messages:
if isinstance(message, str):
message = PlainTextMessage(message)
resulting_priority = message.priority
if priority is not None:
resulting_priority = priority
results.append(message.schedule(sender=sender, recipients=recipients, priority=resulting_priority))
return results
def send_scheduled_messages(
priority: Optional[int] = None,
ignore_unknown_messengers: bool = False,
ignore_unknown_message_types: bool = False
):
"""Sends scheduled messages.
:param priority: number to limit sending message by this priority.
:param ignore_unknown_messengers: to silence UnknownMessengerError
:param ignore_unknown_message_types: to silence UnknownMessageTypeError
:raises UnknownMessengerError:
:raises UnknownMessageTypeError:
"""
dispatches_by_messengers = Dispatch.group_by_messengers(Dispatch.get_unsent(priority=priority))
for messenger_id, messages in dispatches_by_messengers.items():
try:
messenger_obj = get_registered_messenger_object(messenger_id)
messenger_obj.process_messages(messages, ignore_unknown_message_types=ignore_unknown_message_types)
except UnknownMessengerError:
if ignore_unknown_messengers:
continue
raise
def send_test_message(messenger_id: str, to: Optional[str] = None) -> Any:
"""Sends a test message using the given messenger.
:param messenger_id: Messenger alias.
:param to: Recipient address (if applicable).
"""
messenger_obj = get_registered_messenger_object(messenger_id)
return messenger_obj.send_test_message(to=to, text='Test message from sitemessages.')
def check_undelivered(to: Optional[str] = None) -> int:
"""Sends a notification email if any undelivered dispatches.
Returns undelivered (failed) dispatches count.
:param to: Recipient address. If not set Django ADMINS setting is used.
"""
failed_count = Dispatch.objects.filter(dispatch_status=Dispatch.DISPATCH_STATUS_FAILED).count()
if failed_count:
from sitemessage.shortcuts import schedule_email
from sitemessage.messages.email import EmailTextMessage
if to is None:
admins = settings.ADMINS
if admins:
to = list(dict(admins).values())
if to:
priority = 999
register_message_types(EmailTextMessage)
schedule_email(
_('You have %(count)s undelivered dispatch(es) at %(url)s') % {
'count': failed_count,
'url': get_site_url(),
},
subject=_('[SITEMESSAGE] Undelivered dispatches'),
to=to, priority=priority)
send_scheduled_messages(priority=priority)
return failed_count
def cleanup_sent_messages(ago: Optional[int] = None, dispatches_only: bool = False):
"""Cleans up DB : removes delivered dispatches (and messages).
:param ago: Days. Allows cleanup messages sent X days ago. Defaults to None (cleanup all sent).
:param dispatches_only: Remove dispatches only (messages objects will stay intact).
"""
filter_kwargs = {
'dispatch_status': Dispatch.DISPATCH_STATUS_SENT,
}
objects = Dispatch.objects
if ago:
filter_kwargs['time_dispatched__lte'] = timezone.now() - timedelta(days=int(ago))
dispatch_map = dict(objects.filter(**filter_kwargs).values_list('pk', 'message_id'))
# Remove dispatches
objects.filter(pk__in=list(dispatch_map.keys())).delete()
if not dispatches_only:
# Remove messages also.
messages_ids = set(dispatch_map.values())
if messages_ids:
messages_blocked = set(chain.from_iterable(
objects.filter(message_id__in=messages_ids).values_list('message_id')))
messages_stale = messages_ids.difference(messages_blocked)
if messages_stale:
Message.objects.filter(pk__in=messages_stale).delete()
def prepare_dispatches() -> List[Dispatch]:
"""Automatically creates dispatches for messages without them."""
dispatches = []
target_messages = Message.get_without_dispatches()
cache = {}
for message_model in target_messages:
if message_model.cls not in cache:
message_cls = get_registered_message_type(message_model.cls)
subscribers = message_cls.get_subscribers()
cache[message_model.cls] = (message_cls, subscribers)
else:
message_cls, subscribers = cache[message_model.cls]
dispatches.extend(message_cls.prepare_dispatches(message_model))
return dispatches
def get_user_preferences_for_ui(
user: AbstractBaseUser,
message_filter: Optional[Callable] = None,
messenger_filter: Optional[Callable] = None,
new_messengers_titles: Optional[Dict[str, str]] = None
) -> Tuple[List[str], Mapping]:
"""Returns a two element tuple with user subscription preferences to render in UI.
Message types with the same titles are merged into one row.
First element:
A list of messengers titles.
Second element:
User preferences dictionary indexed by message type titles.
Preferences (dictionary values) are lists of tuples:
(preference_alias, is_supported_by_messenger_flag, user_subscribed_flag)
Example:
{'My message type': [('test_message|smtp', True, False), ...]}
:param user:
:param message_filter: A callable accepting a message object to filter out message types
:param messenger_filter: A callable accepting a messenger object to filter out messengers
:param new_messengers_titles: Mapping of messenger aliases to a new titles.
"""
if new_messengers_titles is None:
new_messengers_titles = {}
msgr_to_msg = defaultdict(set)
msg_titles = {}
msgr_titles = {}
for msgr in get_registered_messenger_objects().values():
if not (messenger_filter is None or messenger_filter(msgr)) or not msgr.allow_user_subscription:
continue
msgr_alias = msgr.alias
msgr_title = new_messengers_titles.get(msgr.alias) or msgr.title
for msg in get_registered_message_types().values():
if not (message_filter is None or message_filter(msg)) or not msg.allow_user_subscription:
continue
msgr_supported = msg.supported_messengers
is_supported = (not msgr_supported or msgr.alias in msgr_supported)
if not is_supported:
continue
msg_alias = msg.alias
msg_titles.setdefault(f'{msg.title}', []).append(msg_alias)
msgr_to_msg[msgr_alias].update((msg_alias,))
msgr_titles[msgr_title] = msgr_alias
def sort_titles(titles):
return dict(sorted([(k, v) for k, v in titles.items()], key=itemgetter(0)))
msgr_titles = sort_titles(msgr_titles)
user_prefs = {}
user_subscriptions = [
f'{pref.message_cls}{_ALIAS_SEP}{pref.messenger_cls}'
for pref in Subscription.get_for_user(user)]
for msg_title, msg_aliases in sort_titles(msg_titles).items():
for __, msgr_alias in msgr_titles.items():
msg_candidates = msgr_to_msg[msgr_alias].intersection(msg_aliases)
alias = ''
msg_supported = False
subscribed = False
if msg_candidates:
alias = f'{msg_candidates.pop()}{_ALIAS_SEP}{msgr_alias}'
msg_supported = True
subscribed = alias in user_subscriptions
user_prefs.setdefault(msg_title, []).append((alias, msg_supported, subscribed))
return list(msgr_titles.keys()), user_prefs
def set_user_preferences_from_request(request: HttpRequest) -> bool:
"""Sets user subscription preferences using data from a request.
Expects data sent by form built with `sitemessage_prefs_table` template tag.
Returns a flag, whether prefs were found in the request.
:param request:
"""
prefs = []
for pref in request.POST.getlist(_PREF_POST_KEY):
message_alias, messenger_alias = pref.split(_ALIAS_SEP)
try:
get_registered_message_type(message_alias)
get_registered_messenger_object(messenger_alias)
except (UnknownMessengerError, UnknownMessageTypeError):
pass
else:
prefs.append((message_alias, messenger_alias))
Subscription.replace_for_user(request.user, prefs)
return bool(prefs)
def get_sitemessage_urls() -> List:
"""Returns sitemessage urlpatterns, that can be attached to urlpatterns of a project:
# Example from urls.py.
from sitemessage.toolbox import get_sitemessage_urls
urlpatterns = patterns('',
# Your URL Patterns belongs here.
) + get_sitemessage_urls() # Now attaching additional URLs.
"""
url_unsubscribe = re_path(
r'^messages/unsubscribe/(?P<message_id>\d+)/(?P<dispatch_id>\d+)/(?P<hashed>[^/]+)/$',
unsubscribe,
name='sitemessage_unsubscribe'
)
url_mark_read = re_path(
r'^messages/ping/(?P<message_id>\d+)/(?P<dispatch_id>\d+)/(?P<hashed>[^/]+)/$',
mark_read,
name='sitemessage_mark_read'
)
return [url_unsubscribe, url_mark_read]
| bsd-3-clause | 8,768,792,772,091,165,000 | 32.359195 | 111 | 0.663968 | false | 4.073333 | false | false | false |
Xifax/suzu-web | src/api/jp/jisho.py | 1 | 3940 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Jisho.org semi-api
"""
import re
from collections import OrderedDict
from itertools import islice
import requests
from requests import RequestException
from bs4 import BeautifulSoup
class Jisho:
    def __init__(self):
        """Set up the Jisho.org URL templates used by the lookup methods.

        Each template takes a single ``%s`` substitution for the search
        term / word.
        """
        # Prefix search: matches words *starting with* the term
        self.url = u'http://jisho.org/words?jap=%s&eng=&dict=edict'
        # Wildcard search: term may appear at any position in the word
        self.fuzzy_url = u'http://jisho.org/words?jap=*%s*&eng=&dict=edict'
        # Per-character details page (one result block per kanji in the word)
        self.details_url = u'http://jisho.org/kanji/details/%s'
def lookup(self, term, fuzzy=True):
"""Lookup term on Jisho"""
url = self.fuzzy_url if fuzzy else self.url
try:
return requests.get(url % term).content
except RequestException:
return ''
def complete(self, kanji):
"""Get words which include specified kanji"""
results = []
soup = BeautifulSoup(self.lookup(kanji), 'lxml')
for word in soup.find_all('span', {'class': 'kanji'}):
# get text from html element, strip spaces and tabs
word = word.get_text().strip()
# skip kanji itself
if word != kanji:
results.append(word)
return results
def define(self,
kanji,
limit=20,
skip_same_reading=False,
skip_same_meaning=False):
"""Get words with specified kanji + meaning + kana
Returns iterator.
"""
results = OrderedDict({})
soup = BeautifulSoup(self.lookup(kanji), 'lxml')
# Utility function to get specific row|column text
get_row = lambda row, column: row.find('td', column).get_text().strip()
columns = ['kanji_column', 'kana_column', 'meanings_column']
# Find rows with classes 'odd' and 'even'
for row in soup.find_all("tr", {"class": re.compile(r"^(odd|even)$")}):
# Skip 'lower' classes
if 'lower' in row['class']:
continue
# Get columns by names
word, kana, meaning = [get_row(row, column) for column in columns]
# Append to results if not the same kanji
if word != kanji:
results[word] = {'kana': kana, 'meaning': meaning}
# todo: filter results based on flags
# todo: may filter by the same meaning and kana
return islice(results.iteritems(), limit)
def details(self, word):
"""Get info for each kanji in word"""
details = {}
try:
data = BeautifulSoup(
requests.get(self.details_url % word).content, 'lxml'
)
for div in data.find_all('div', 'kanji_result'):
# Get kanji, its meanings and readings
kanji = div.find('h1', 'literal').get_text().strip()
meanings = div.find('div', 'english_meanings') \
.get_text(strip=True).replace('English meanings', '')
try:
kun, on = div.find('div', 'japanese_readings') \
.get_text().strip().split('\n')
names = u''
except ValueError:
kun, on, names = div.find('div', 'japanese_readings') \
.get_text().strip().split('\n')
details[kanji] = {
'meanings': meanings.replace(';', ', '),
'on': on.replace('Japanese on:', '').strip(),
'kun': kun.replace('Japanese kun:', '').strip(),
'names': names.replace('Japanese names:', '').strip()
}
except RequestException:
pass
return details
if __name__ == '__main__':
for item, value in Jisho().details(u'才凱旋').iteritems():
print item
for key, data in value.iteritems():
print key, data
| bsd-2-clause | 1,944,833,856,903,504,100 | 31.783333 | 79 | 0.523894 | false | 4.047325 | false | false | false |
alexisrolland/data-quality | api/init/security/oauth/google.py | 1 | 2553 | import json
import os
from flask import redirect, request, session
from flask_restplus import Namespace, Resource
from requests_oauthlib import OAuth2Session
from security.token import get_jwt_token, TokenType, get_token_redirect_response
# pylint: disable=unused-variable
# OAuth endpoints given in the Google API documentation
AUTHORIZATION_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
TOKEN_URI = 'https://www.googleapis.com/oauth2/v4/token'
USER_PROFILE_URI = 'https://www.googleapis.com/oauth2/v1/userinfo'
SCOPE = ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email']
# OAuth application configuration created on Google
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
redirect_uri = os.environ['HOST_NAME'] + '/mobydq/api/v1/security/oauth/google/callback'
def get_user_info(google_session: object):
"""Gets user profile using OAuth session."""
user_profile = google_session.get(USER_PROFILE_URI).content.decode('utf-8')
user_profile = json.loads(user_profile)
return user_profile
def register_google_oauth(namespace: Namespace):
"""Registers all endpoints used for Google OAuth authentication."""
@namespace.route('/security/oauth/google')
@namespace.doc()
class GoogleOAuth(Resource):
"""Defines resource to redirect user to Google OAuth page."""
def get(self):
"""Redirects user to Google OAuth page."""
google_session = OAuth2Session(client_id, redirect_uri=redirect_uri, scope=SCOPE)
url, state = google_session.authorization_url(AUTHORIZATION_URI, access_type='offline', prompt='select_account')
# State is used to prevent CSRF, keep this for later.
session['oauth_state'] = state
return redirect(url)
@namespace.route('/security/oauth/google/callback')
@namespace.doc()
class GoogleOAuthCallback(Resource):
"""Defines resource to handle callback from Google OAuth."""
def get(self):
"""Handles Google OAuth callback and fetch user access token."""
google_session = OAuth2Session(client_id, redirect_uri=redirect_uri, scope=SCOPE)
token = google_session.fetch_token(TOKEN_URI, client_secret=client_secret, authorization_response=request.url)
user_info = get_user_info(google_session)
jwt = get_jwt_token(TokenType.GOOGLE, user_info['email'], user_info, token)
return get_token_redirect_response(jwt)
| apache-2.0 | 7,800,487,101,587,096,000 | 39.52381 | 124 | 0.705445 | false | 3.927692 | false | false | false |
jasset75/yapytex | yapytex/document.py | 1 | 3379 | from yapytex.dictutils import DictWrapper
from yapytex import latex_directives as xdir
from yapytex import styles
from yapytex import miscelanea as misc
from yapytex.abstract import YaPyTexBase
from yapytex.pieces import YaPyTexPiece, YaPyTexAppendix
#book layout
#https://en.wikipedia.org/wiki/Book_design
_d_misc_options = dict(
numbered = r'numbered',
pprint = r'print',
index = r'index'
)
misc_options = DictWrapper(_d_misc_options)
_default_doc_options = [
misc_options.numbered,
styles.font_sizes.sz12pt,
styles.font_families.times,
misc_options.pprint,
misc_options.index,
]
class Document(YaPyTexBase):
_appendices = []
_pre = []
_glossary = []
_acronym = []
_pieces = []
_title = None
_author = None
_language = xdir.default_language
_hook_load_packages = None
_type = 'article'
@property
def language(self):
return self._language
@language.setter
def language(self):
return self._language
@property
def title(self):
return self._title
@title.setter
def title(self,title):
self._title = title
@property
def author(self):
return self._author
@property
def hook_load_packages(self):
return self._hook_load_packages
@hook_load_packages.setter
def hook_load_packages(self,hook):
self._hook_load_packages = hook
@author.setter
def author(self,author):
self._author = author
def add(self, piece):
if not isinstance(piece,YaPyTexPiece):
raise Exception('Piece argument must be YaPyTexPiece instance.')
self._pieces.append(piece)
def add_appendix(self, appendix):
if not isinstance(appendix,YaPyTexAppendix):
raise Exception('Appendix argument must be YaPyTexAppendix instance.')
self._appendices.append(appendix)
def build(self,ttype):
pre_header = [
xdir.doc_class.format(','.join(_default_doc_options),ttype),
xdir.useinputenc,
xdir.usenumerate,
xdir.usehyperref,
] + self._pre
if self._hook_load_packages:
self._hook_load_packages(pre_header)
if self._language is 'es_ES':
pre_header.append(xdir.es_ES)
if self._title:
pre_header.append(xdir.doc_title.format(self._title))
if self._author:
pre_header.append(xdir.doc_author.format(self._author))
if xdir.useglossaries in pre_header and len(self._glossary) > 0:
pre_header.append(xdir.make_glossaries)
pre_header.append(xdir.gls_entry_italic)
header = []
#document's begin
header.append(xdir.doc_begin)
post_header = []
if self._title:
post_header.append(xdir.maketitle)
post_header.append(xdir.cleardoublepage)
post_header.append(xdir.tableofcontents)
pieces = map(misc.format,self._pieces)
backmatter = [xdir.backmatter]
backmatter.append('\n'.join(map(misc.format,self._appendices)))
if xdir.useglossaries in pre_header and len(self._glossary) > 0:
backmatter.append(xdir.print_glossaries)
if xdir.useglossaries in pre_header and len(self._acronym) > 0:
backmatter.append(xdir.print_acronyms)
pre_header.extend(self._glossary)
pre_header.extend(self._acronym)
#this line may be the last of directives
backmatter.append(xdir.doc_end)
return \
'\n'.join(pre_header)+\
'\n'.join(header)+\
'\n'.join(post_header)+\
'\n'.join(pieces)+\
'\n'.join(backmatter) | mit | 6,713,230,459,883,180,000 | 25.40625 | 76 | 0.681267 | false | 3.175752 | false | false | false |
maur1th/naxos-project | app/forum/user/migrations/0001_initial.py | 2 | 6509 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-10-21 23:30
from __future__ import unicode_literals
import datetime
from django.conf import settings
import django.contrib.auth.models
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0007_alter_validators_add_error_messages'),
('forum', '0001_initial'),
('pm', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ForumUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, unique=True, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.')], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('emailVisible', models.BooleanField(default=False, verbose_name='E-mail visible')),
('subscribeToEmails', models.BooleanField(default=True, verbose_name='Mailing-list')),
('mpEmailNotif', models.BooleanField(default=False, verbose_name='Notification des MP par e-mail')),
('showSmileys', models.BooleanField(default=False, verbose_name='Affichage des smileys par defaut')),
('fullscreen', models.BooleanField(default=False, verbose_name="Utilisation de la largeur de l'écran")),
('showLogosOnSmartphone', models.BooleanField(default=True, verbose_name='Afficher les logos sur smartphone')),
('logo', models.ImageField(blank=True, upload_to='logo')),
('quote', models.CharField(blank=True, max_length=50, verbose_name='Citation')),
('website', models.URLField(blank=True, verbose_name='Site web')),
('pmUnreadCount', models.IntegerField(default=0)),
('resetDateTime', models.DateTimeField(default=datetime.datetime(2013, 1, 1, 0, 0))),
('is_online', models.BooleanField(default=False)),
('last_seen', models.DateTimeField(blank=True, null=True)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('pmReadCaret', models.ManyToManyField(blank=True, to='pm.Message')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'ordering': ['pk'],
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Bookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now=True)),
('thread', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bookmarks', to='forum.Thread')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bookmarks', to=settings.AUTH_USER_MODEL)),
],
options={
'get_latest_by': 'timestamp',
},
),
migrations.CreateModel(
name='CategoryTimeStamp',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='forum.Category')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='categoryTimeStamps', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='TokenPool',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(max_length=50, unique=True)),
],
),
migrations.AlterUniqueTogether(
name='categorytimestamp',
unique_together=set([('user', 'category')]),
),
migrations.AlterIndexTogether(
name='categorytimestamp',
index_together=set([('user', 'category')]),
),
migrations.AlterUniqueTogether(
name='bookmark',
unique_together=set([('user', 'thread')]),
),
migrations.AlterIndexTogether(
name='bookmark',
index_together=set([('user', 'thread')]),
),
]
| gpl-3.0 | 4,141,799,545,695,767,000 | 59.82243 | 421 | 0.615857 | false | 4.330007 | false | false | false |
hwstar/chargectrlr-python-buspirate | chargerctrl/Dialog.py | 1 | 2812 | __author__ = 'srodgers'
"""
This file is part of chargectrl-python-buspirate.
chargectrl-python-buspirate is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Foobar is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with chargectrl-python-buspirate. If not, see <http://www.gnu.org/licenses/>.
"""
from tkinter import *
from tkinter.ttk import *
class Dialog(Toplevel):
def __init__(self, parent, title = None, xoffset = 50, yoffset = 50):
Toplevel.__init__(self, parent)
self.transient(parent)
if title:
self.title(title)
self.parent = parent
self.result = None
body = Frame(self)
self.initial_focus = self.body(body)
body.pack(padx=5, pady=5)
self.buttonbox()
self.grab_set()
if not self.initial_focus:
self.initial_focus = self
self.protocol("WM_DELETE_WINDOW", self.cancel)
self.geometry("+%d+%d" % (parent.winfo_rootx()+xoffset,
parent.winfo_rooty()+yoffset))
self.initial_focus.focus_set()
self.wait_window(self)
#
# construction hooks
def body(self, master):
# create dialog body. return widget that should have
# initial focus. this method should be overridden
pass
def buttonbox(self):
# add standard button box. override if you don't want the
# standard buttons
box = Frame(self)
w = Button(box, text="OK", width=10, command=self.ok, default=ACTIVE)
w.pack(side=LEFT, padx=5, pady=5)
w = Button(box, text="Cancel", width=10, command=self.cancel)
w.pack(side=LEFT, padx=5, pady=5)
self.bind("<Return>", self.ok)
self.bind("<Escape>", self.cancel)
box.pack()
#
# standard button semantics
def ok(self, event=None):
if not self.validate():
self.initial_focus.focus_set() # put focus back
return
self.withdraw()
self.update_idletasks()
self.apply()
self.cancel()
def cancel(self, event=None):
# put focus back to the parent window
self.parent.focus_set()
self.destroy()
#
# command hooks
def validate(self):
return 1 # override
def apply(self):
pass # override
| gpl-3.0 | -3,948,247,585,451,378,700 | 23.241379 | 88 | 0.608464 | false | 4.017143 | false | false | false |
gabstopper/smc-python | smc/examples/virtual_engines.py | 1 | 4198 | '''
Example of automating the creation of a L3 Master Engine or Master Engine Cluster,
configuring interfaces and creating the virtual engines.
'''
from smc import session
from smc.core.engine import Engine
from smc.core.engines import Layer3VirtualEngine, MasterEngineCluster, MasterEngine
def create_single_master_engine():
"""
Create a single master engine instance example
:raises: `smc.api.exceptions.CreateEngineFailed`
"""
MasterEngine.create('api-master',
mgmt_ip='1.1.1.1',
mgmt_netmask='1.1.1.0/24',
master_type='firewall',
domain_server_address=['8.8.4.4', '7.7.7.7'])
# Returns smc.core.engine.Engine instance
def create_cluster_master_engine():
"""
Create the master engine cluster with 2 nodes example
Nodes data structure is:
[{'address': '', #ip address of node
'network_value': '', #network/cidr
'nodeid': 1}] #node identifier, 1, 2, 3, etc
:raises: `smc.api.exceptions.CreateEngineFailed`
"""
MasterEngineCluster.create(
name='engine-cluster',
master_type='firewall',
macaddress='22:22:22:22:22:22',
nodes=[{'address': '5.5.5.2',
'network_value': '5.5.5.0/24',
'nodeid': 1},
{'address': '5.5.5.3',
'network_value': '5.5.5.0/24',
'nodeid': 2}])
# Returns smc.core.engine.Engine instance
def delete(ve, master_engine):
# Delete easily. Just load the engine resource and call delete. Delete VE's first.
# All elements descend from smc.base.model.Element
# Note: Most operations will return an instance of smc.api.web.SMCResult so you
# can view the return attributes if necessary.
ve = Engine('layer3-ve').load()
ve.delete()
master = Engine('engine-cluster').load()
master.delete()
if __name__ == '__main__':
session.login(url='https://172.18.1.25:8082', api_key='avUj6vFZTUSZ7sr8mNsP0001', timeout=120,
verify=False)
create_cluster_master_engine()
# Load the existing master engine named 'master-eng'
engine = Engine('engine-cluster').load()
# Create a virtual resource named 've-1' with virtual firewall id 1
# vfw_id should increment by 1 for each new virtual firewall under the
# same Master Engine
print engine.virtual_resource.create(name='ve-1', vfw_id=1)
# Example of allocating the entire physical interface to a virtual engine
# Create the interface mapping to virtual resource 've-1'
print engine.physical_interface.add(interface_id=1,
virtual_mapping=0,
virtual_resource_name='ve-1')
# Example of allocating a VLAN to a virtual engine on a specific physical
# interface.
# engine.physical_interface.add_vlan_to_node_interface(
# interface_id=1,
# vlan_id=100,
# virtual_mapping=0,
# virtual_resource_name='ve-1')
# Virtual Engine interface mappings start at 0 (interface_id) regardless of the
# real interface index on the master engine. The interface_id for the virtual engine
# will start it's numbering at index 0 and increment by 1 for each interface allocated.
# The interface_id field correlates to the "Virtual Engine Interface ID" property of
# the master engine's physical interface.
#:raises: `smc.api.exceptions.CreateEngineFailed`
Layer3VirtualEngine.create('layer3-ve',
master_engine='engine-cluster',
virtual_resource='ve-1',
interfaces=[{'interface_id': 0,
'address': '1.1.1.1',
'network_value': '1.1.1.0/24'}])
# Returns smc.core.engine.Layer3VirtualEngine instance
# delete()
session.logout()
| apache-2.0 | 1,223,394,019,031,582,700 | 40.156863 | 98 | 0.577418 | false | 4.160555 | false | false | false |
pashinin-com/pashinin.com | src/core/files/views.py | 1 | 9340 | import json
import os
from core.views import BaseView
from .sendfile import send_file
from .models import BaseFile as File, UploadedFile
from core import now
from .tasks import ensure_fs_ready
from .forms import UploadedFileForm
from django.http import HttpResponse, HttpResponseNotFound, JsonResponse
# from django.db.models import Count
from django.conf import settings
# from django.utils.translation import gettext_lazy as _
# from django.core.urlresolvers import reverse
from braces import views
from os.path import isfile, isdir, join
from . import files_used_in_this_repo
import logging
log = logging.getLogger(__name__)
class FileView(BaseView):
"""Return a file with permission checking"""
def get(self, request, **kwargs):
"""If "d" is present in query string - make an attachment (always
download, not view in browser)
"""
is_attachment = 'd' in self.request.GET
sha1 = kwargs.get('sha1', None)
try:
f = File.objects.get(sha1=sha1)
return send_file(f, attachment=is_attachment)
except File.DoesNotExist:
#
# If there is no info in a DB about this file return file
# anyway (if exists) and then run a task to process file and
# add to DB.
#
roots = settings.FILES_ROOT
if isinstance(roots, str):
roots = [settings.FILES_ROOT]
for root in roots:
filename = os.path.join(
root,
sha1[:3],
sha1[3:6],
sha1[6:]
)
if os.path.isfile(filename):
# TODO: run task to add file info to DB
return send_file(filename, attachment=is_attachment)
# else:
# return HttpResponseNotFound(
# "No such file {}".format(
# filename if settings.DEBUG else ""))
else:
return HttpResponseNotFound(
"No such file {}".format(
filename if settings.DEBUG else ""))
# c['uploads'] = UploadFile.objects.all().annotate(
# null_position=Count('date_uploaded')).order_by('-null_position',
# '-date_uploaded')
class DownloadCore(
views.LoginRequiredMixin,
views.SuperuserRequiredMixin,
BaseView,
):
def post(self, request, **kwargs):
if not settings.DEBUG:
return HttpResponse('[]', content_type='application/json')
ensure_fs_ready()
files = files_used_in_this_repo() # sha1 list
for f in files:
url = 'https://pashinin.com/_/files/{}'.format(f)
File.from_url(url)
return HttpResponse(json.dumps({
'dir': settings.REPO_PATH,
'files': json.dumps(files),
'len': len(files)
}), content_type='application/json')
class Upload(BaseView):
def post(self, request, **kwargs):
ensure_fs_ready()
ctx = self.get_context_data(**kwargs)
user = ctx['user']
form = UploadedFileForm(request.POST, request.FILES)
files = []
for field in request.FILES:
log.debug(request.FILES[field])
new_file = UploadedFile(file=request.FILES[field])
# Uploader should have access to his file
# Save it in a session for Anons
if user.is_anonymous:
if 'files' not in request.session:
# request.session['files'] = [upload_md5]
pass
else:
# request.session['files'] += [upload_md5]
pass
else:
new_file.uploader = user
new_file.save()
from .tasks import move_upload_to_files
f = move_upload_to_files(new_file)
files.append({'sha1': f.sha1})
# 'BaseFile' object has no attribute 'uploader'
# if not f.uploader and user is not None and not user.is_anonymous:
# f.uploader = user
# f.save()
# TODO: optimize uploaded JPGs
#
# jpegtran -copy none -optimize -perfect inputimage.jpg >
# outputimage.jpg
# user.avatar = Image.from_file(f)
return JsonResponse({'files': files})
return HttpResponse(json.dumps({'errors': ['asd']}),
content_type='application/json')
if form.is_valid():
new_file = UploadedFile(file=request.FILES['file'])
# Uploader should have access to his file
# Save it in a session for Anons
if user.is_anonymous:
if 'files' not in request.session:
# request.session['files'] = [upload_md5]
pass
else:
# request.session['files'] += [upload_md5]
pass
else:
new_file.uploader = user
new_file.save()
from .tasks import move_upload_to_files
f = move_upload_to_files(new_file)
# 'BaseFile' object has no attribute 'uploader'
# if not f.uploader and user is not None and not user.is_anonymous:
# f.uploader = user
# f.save()
# TODO: optimize uploaded JPGs
#
# jpegtran -copy none -optimize -perfect inputimage.jpg >
# outputimage.jpg
# user.avatar = Image.from_file(f)
return HttpResponse(json.dumps({
'url': f.get_absolute_url(),
'id': f.pk,
'sha1': f.sha1
}), content_type='application/json')
else: # upload form is not valid
return HttpResponse(json.dumps({'errors': form.errors}),
content_type='application/json')
for key in request.FILES:
f = request.FILES[key]
upload = UploadedFile(file=f)
# upload_md5 = upload.get_md5()
upload.date_uploaded = now()
if f.multiple_chunks(): # file is already on disk
upload.save()
else:
# check if this md5 was already uploaded
prevUpload = upload.uploaded_earlier
if prevUpload:
upload = prevUpload
else:
upload.save()
# The file is uploaded, it is now for example:
#
# /mnt/files/uploads/2016/06/28/dropzone_NaLkPzK.css
#
# Upload directory:
# MEDIA_ROOT = os.path.join(FILES_ROOT, 'uploads/')
#
# TODO: process uploaded file
# upload.process()
return HttpResponse(json.dumps({'url': upload.url}))
class Files(
views.LoginRequiredMixin,
views.SuperuserRequiredMixin,
BaseView
):
"""Files management (admin view)"""
template_name = "core_files.jinja"
def get_context_data(self, **kwargs):
c = super().get_context_data(**kwargs)
c['files_count'] = File.objects.count()
c['files'] = File.objects.filter().order_by('-added')[:10]
c['dropzone'] = True
c['timeago'] = True
# Check mountpoint
import psutil
c['mounted'] = False
for p in psutil.disk_partitions(True):
if p.mountpoint == settings.FILES_ROOT:
c['mounted'] = True
break
return c
class DirView(BaseView):
template_name = "cms_files_dirview.jinja"
dir = None
def get_context_data(self, **kwargs):
"""asd
levels - ['path', 'to', 'dir']
paths - ['/path', '/path/to', '/path/to/dir']
"""
c = super().get_context_data(**kwargs)
c['files'] = []
c['dirs'] = []
path = kwargs.get('path', '')
if path is None:
path = ''
path = path.strip('/')
c['levels'] = path.split('/')
# c['levels'][0] = '/'+c['levels'][0]
c['paths'] = list(c['levels'])
print(c['paths'])
try:
c['levels'].remove('')
except ValueError:
pass
for i, el in enumerate(c['levels']):
sublist = c['levels'][0:i+1]
c['paths'][i] = '/'+os.path.join(*sublist)
# c['paths'][0] = '/'+c['paths'][0]
j = os.path.join(self.dir, path)
# log.debug((self.dir, path))
d = c['dir'] = os.path.realpath(j)
if self.dir is None or \
not isdir(self.dir):
log.error('DirView: No dir {}'.format(self.dir))
return c
if isdir(d):
L = os.listdir(d)
c['dirs'] = sorted([f for f in L if isdir(join(d, f))])
c['files'] = sorted([f for f in L if isfile(join(d, f))])
for ind, f in enumerate(c['files']):
c['files'][ind] = {
'name': f,
'size': os.path.getsize(join(d, f))
}
elif os.path.isfile(d):
c['f'] = d
else:
log.error('DirView 404: No dir {}'.format(d))
c['status'] = 404
return c
| gpl-3.0 | 2,884,424,129,055,935,000 | 31.657343 | 79 | 0.512848 | false | 4.167782 | false | false | false |
Unknowncmbk/Thumly | thumly/user.py | 1 | 2868 | import MySQLdb
import credentials
class User(object):
def __init__(self, email, password):
self.email = email
self.password = password
def __str__(self):
return "email: " + str(self.email) + "password: " + str(self.password)
def save(self):
"""
Saves this User to the database.
"""
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''INSERT IGNORE INTO users (email, password)
VALUES(%s, %s);'''
data = (self.email, self.password)
cur.execute(query, data)
# commit query
db.commit()
db.close()
return True
def verify(self):
"""
Returns:
True if the user's credentials match the database. False otherwise.
"""
if isUniqueEmail(self.email) == False:
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''SELECT email, password FROM users WHERE email = %s;'''
cur.execute(query, self.email)
em = ""
ps = ""
value = False
for tup in cur:
em = tup[0]
ps = tup[1]
if em == self.email and ps == self.password:
value = True
# commit query
db.commit()
db.close()
return value
return False
def load(email):
'''
Args:
email: The email to query.
Returns:
A user given the email.
'''
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''SELECT * FROM users WHERE email = %s;'''
cur.execute(query,email)
user = ""
for tup in cur:
user = User(tup[0], tup[1])
# commit query
db.commit()
db.close()
return user
def loadAll():
'''
Returns:
A list of all users.
'''
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''SELECT email, password FROM users;'''
cur.execute(query)
users = []
for tup in cur:
users.append(User(tup[0], tup[1]))
# commit query
db.commit()
db.close()
return users
def addUser(email, password):
'''
Saves a new user to the database.
'''
User(email,password).save()
def isUniqueEmail(email):
'''
Args:
email: the email to query
Returns:
True if the email does not yet exist in the database.
'''
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''SELECT COUNT(*) FROM users WHERE email =%s;'''
cur.execute(query, email)
count = 0
for tup in cur:
count = tup[0]
return count == 0 | mit | -7,791,856,545,844,101,000 | 20.410448 | 79 | 0.521967 | false | 4.180758 | false | false | false |
gecos-team/gecosws-config-assistant | gecosws_config_assistant/view/NTPServerElemView.py | 1 | 4317 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# This file is part of Guadalinex
#
# This software is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__author__ = "Abraham Macias Paredes <[email protected]>"
__copyright__ = "Copyright (C) 2015, Junta de Andalucía" + \
"<[email protected]>"
__license__ = "GPL-2"
import logging
import gettext
from gettext import gettext as _
from gecosws_config_assistant.view.GladeWindow import GladeWindow
from gecosws_config_assistant.dto.NTPServer import NTPServer
from gecosws_config_assistant.view.CommonDialog import (
showwarning_gtk, showinfo_gtk)
gettext.textdomain('gecosws-config-assistant')
class NTPServerElemView(GladeWindow):
    '''
    Dialog that displays and edits a single NTP server element.

    The view reads the server address from the ``ntp_server_entry``
    widget, lets the user test the connection, and on acceptance
    delegates persistence to the controller.
    '''
    def __init__(self, parent, mainController):
        '''
        Constructor.

        parent: parent window used for navigation.
        mainController: controller that performs the NTP connection
            test and saves the configuration.
        '''
        self.parent = parent
        self.controller = mainController
        self.logger = logging.getLogger('NTPServerElemView')
        self.gladepath = 'ntp.glade'
        self.data = None
        # When False, the "connection successful" popup is suppressed;
        # accept() uses this to run a silent pre-save test.
        self.displaySuccess = True
        self.initUI()
    def get_data(self):
        ''' Getter data '''
        return self.__data
    def set_data(self, value):
        ''' Setter data '''
        self.__data = value
    def initUI(self):
        ''' Build the UI from the Glade file. '''
        self.buildUI(self.gladepath)
    def addHandlers(self):
        ''' Register the Glade signal handlers. '''
        self.logger.debug("Adding all handlers")
        self.handlers = self.parent.get_common_handlers()
        # add new handlers here; the keys must match the handler names
        # declared in the Glade file (including their spellings).
        self.logger.debug("Adding check ntp connection")
        self.handlers["onChek"] = self.test
        self.logger.debug("Adding OK handler")
        self.handlers["onOOKK"] = self.accept
        self.logger.debug("Adding back handler")
        self.handlers["onBack"] = self.goBack
    def show(self):
        ''' Show the dialog, pre-filling the entry with current data. '''
        self.logger.debug("Show")
        data = self.get_data()
        if data is not None:
            self.getElementById('ntp_server_entry') \
                .set_text(data.get_address())
        self.parent.navigate(self)
    def goBack(self, *args):
        ''' Return to the main window dialog. '''
        self.logger.debug("Go back")
        self.controller.mainWindowController.backToMainWindowDialog()
    def _readAddressFromEntry(self):
        ''' Copy the entry widget's text into the data object,
            creating the NTPServer DTO on first use. '''
        if self.get_data() is None:
            self.set_data(NTPServer())
        self.get_data().set_address(
            self.getElementById('ntp_server_entry').get_text())
    def accept(self, *args):
        ''' Validate the address silently and save it on success. '''
        self.logger.debug("Accept")
        self._readAddressFromEntry()
        # Run the connection test without the success popup; only a
        # failure warning (shown by test()) should interrupt the save.
        self.displaySuccess = False
        try:
            if self.test(False):
                self.controller.save()
        finally:
            # Always restore normal behaviour for later explicit tests.
            self.displaySuccess = True
    def test(self, *args):
        ''' Test the NTP server connection and report the result.

        Returns True when the controller could reach the server.
        '''
        self.logger.debug("test")
        self._readAddressFromEntry()
        self.logger.debug("test: %s", self.get_data().get_address())
        result = self.controller.test()
        if not result:
            showwarning_gtk(
                _("Can't connect with NTP server.\n" +
                  "Please double-check the NTP server address"),
                self)
        elif self.displaySuccess:
            showinfo_gtk(_("NTP server connection successful"), self)
        return result
    def cancel(self, *args):
        ''' Hide the dialog without saving. '''
        self.logger.debug("cancel")
        self.controller.hide()
    data = property(
        get_data,
        set_data,
        None,
        None)
| gpl-2.0 | -1,983,980,445,662,167,300 | 28.360544 | 74 | 0.617006 | false | 3.948765 | true | false | false |
greysAcademicCode/i-v-vs-time-taker | ivSweeperUI.py | 1 | 23945 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ivSweeper.ui'
#
# Created: Thu Nov 27 00:52:45 2014
# by: PyQt4 UI code generator 4.9.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims: on PyQt4 builds where QString.fromUtf8 (or the
# UnicodeUTF8 flag) is unavailable, accessing it raises AttributeError
# and we fall back to plain-string behaviour.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # Identity: strings are already native Python unicode.
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer API: translate() no longer takes an encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_IVSweeper(object):
    """Auto-generated PyQt4 layout for the IV sweeper main window.

    Generated by pyuic4 from ``ivSweeper.ui``; hand edits are lost if
    the file is regenerated.  ``setupUi`` builds the widget tree and
    ``retranslateUi`` installs all user-visible (translatable) strings.
    """
    def setupUi(self, IVSweeper):
        """Instantiate, size and place every widget of the main window."""
        IVSweeper.setObjectName(_fromUtf8("IVSweeper"))
        IVSweeper.resize(622, 515)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(IVSweeper.sizePolicy().hasHeightForWidth())
        IVSweeper.setSizePolicy(sizePolicy)
        self.centralwidget = QtGui.QWidget(IVSweeper)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        # --- instrument selection group ---
        self.addressGroup = QtGui.QGroupBox(self.centralwidget)
        self.addressGroup.setGeometry(QtCore.QRect(10, 0, 281, 50))
        self.addressGroup.setToolTip(_fromUtf8(""))
        self.addressGroup.setStatusTip(_fromUtf8(""))
        self.addressGroup.setAutoFillBackground(False)
        self.addressGroup.setObjectName(_fromUtf8("addressGroup"))
        self.instrumentCombo = QtGui.QComboBox(self.addressGroup)
        self.instrumentCombo.setGeometry(QtCore.QRect(10, 20, 260, 20))
        self.instrumentCombo.setObjectName(_fromUtf8("instrumentCombo"))
        self.instrumentCombo.addItem(_fromUtf8(""))
        # --- sweep start button (enabled once an instrument is chosen) ---
        self.sweepButton = QtGui.QPushButton(self.centralwidget)
        self.sweepButton.setEnabled(False)
        self.sweepButton.setGeometry(QtCore.QRect(520, 430, 90, 30))
        self.sweepButton.setCheckable(False)
        self.sweepButton.setAutoDefault(False)
        self.sweepButton.setObjectName(_fromUtf8("sweepButton"))
        # --- front/rear terminal selection ---
        self.terminalsGroup = QtGui.QGroupBox(self.centralwidget)
        self.terminalsGroup.setGeometry(QtCore.QRect(300, 0, 150, 50))
        self.terminalsGroup.setObjectName(_fromUtf8("terminalsGroup"))
        self.frontRadio = QtGui.QRadioButton(self.terminalsGroup)
        self.frontRadio.setGeometry(QtCore.QRect(10, 20, 60, 20))
        self.frontRadio.setChecked(True)
        self.frontRadio.setObjectName(_fromUtf8("frontRadio"))
        self.rearRadio = QtGui.QRadioButton(self.terminalsGroup)
        self.rearRadio.setGeometry(QtCore.QRect(80, 20, 60, 20))
        self.rearRadio.setObjectName(_fromUtf8("rearRadio"))
        # --- 2-wire / 4-wire measurement type ---
        self.wiresGroup = QtGui.QGroupBox(self.centralwidget)
        self.wiresGroup.setGeometry(QtCore.QRect(430, 50, 180, 50))
        self.wiresGroup.setObjectName(_fromUtf8("wiresGroup"))
        self.twowireRadio = QtGui.QRadioButton(self.wiresGroup)
        self.twowireRadio.setGeometry(QtCore.QRect(10, 10, 91, 40))
        self.twowireRadio.setChecked(True)
        self.twowireRadio.setObjectName(_fromUtf8("twowireRadio"))
        self.fourwireRadio = QtGui.QRadioButton(self.wiresGroup)
        self.fourwireRadio.setGeometry(QtCore.QRect(100, 10, 80, 40))
        self.fourwireRadio.setObjectName(_fromUtf8("fourwireRadio"))
        # --- sweep setup (start/end values, points, step delay) ---
        self.sweepGroup = QtGui.QGroupBox(self.centralwidget)
        self.sweepGroup.setGeometry(QtCore.QRect(10, 100, 390, 130))
        self.sweepGroup.setStatusTip(_fromUtf8(""))
        self.sweepGroup.setObjectName(_fromUtf8("sweepGroup"))
        self.label = QtGui.QLabel(self.sweepGroup)
        self.label.setGeometry(QtCore.QRect(10, 20, 41, 20))
        self.label.setObjectName(_fromUtf8("label"))
        self.label_2 = QtGui.QLabel(self.sweepGroup)
        self.label_2.setGeometry(QtCore.QRect(10, 50, 41, 20))
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.reverseButton = QtGui.QPushButton(self.sweepGroup)
        self.reverseButton.setGeometry(QtCore.QRect(10, 90, 111, 30))
        self.reverseButton.setObjectName(_fromUtf8("reverseButton"))
        self.endSpin = QtGui.QSpinBox(self.sweepGroup)
        self.endSpin.setGeometry(QtCore.QRect(50, 50, 91, 20))
        self.endSpin.setMinimum(-20000)
        self.endSpin.setMaximum(20000)
        self.endSpin.setProperty("value", 1000)
        self.endSpin.setObjectName(_fromUtf8("endSpin"))
        self.startSpin = QtGui.QSpinBox(self.sweepGroup)
        self.startSpin.setGeometry(QtCore.QRect(50, 20, 91, 20))
        self.startSpin.setMinimum(-20000)
        self.startSpin.setMaximum(20000)
        self.startSpin.setProperty("value", -1000)
        self.startSpin.setObjectName(_fromUtf8("startSpin"))
        self.totalPointsSpin = QtGui.QSpinBox(self.sweepGroup)
        self.totalPointsSpin.setGeometry(QtCore.QRect(230, 20, 71, 20))
        self.totalPointsSpin.setAccelerated(False)
        self.totalPointsSpin.setSuffix(_fromUtf8(""))
        self.totalPointsSpin.setMinimum(1)
        self.totalPointsSpin.setMaximum(2000)
        self.totalPointsSpin.setProperty("value", 100)
        self.totalPointsSpin.setObjectName(_fromUtf8("totalPointsSpin"))
        self.label_3 = QtGui.QLabel(self.sweepGroup)
        self.label_3.setGeometry(QtCore.QRect(150, 20, 81, 20))
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.label_6 = QtGui.QLabel(self.sweepGroup)
        self.label_6.setGeometry(QtCore.QRect(150, 50, 71, 20))
        self.label_6.setObjectName(_fromUtf8("label_6"))
        self.autoAdvance = QtGui.QCheckBox(self.sweepGroup)
        self.autoAdvance.setEnabled(False)
        self.autoAdvance.setGeometry(QtCore.QRect(130, 110, 131, 20))
        self.autoAdvance.setObjectName(_fromUtf8("autoAdvance"))
        self.deltaStep = QtGui.QLabel(self.sweepGroup)
        self.deltaStep.setGeometry(QtCore.QRect(310, 20, 81, 20))
        self.deltaStep.setObjectName(_fromUtf8("deltaStep"))
        self.maxPowerCheck = QtGui.QCheckBox(self.sweepGroup)
        self.maxPowerCheck.setGeometry(QtCore.QRect(130, 90, 111, 20))
        self.maxPowerCheck.setObjectName(_fromUtf8("maxPowerCheck"))
        self.delaySpinBox = QtGui.QDoubleSpinBox(self.sweepGroup)
        self.delaySpinBox.setGeometry(QtCore.QRect(230, 50, 71, 20))
        self.delaySpinBox.setDecimals(3)
        self.delaySpinBox.setMaximum(1000.0)
        self.delaySpinBox.setSingleStep(0.01)
        self.delaySpinBox.setProperty("value", 0.02)
        self.delaySpinBox.setObjectName(_fromUtf8("delaySpinBox"))
        self.totalLabel = QtGui.QLabel(self.sweepGroup)
        self.totalLabel.setGeometry(QtCore.QRect(310, 50, 81, 20))
        self.totalLabel.setObjectName(_fromUtf8("totalLabel"))
        # --- data acquisition settings (speed, averaging, save mode) ---
        self.daqGroup = QtGui.QGroupBox(self.centralwidget)
        self.daqGroup.setGeometry(QtCore.QRect(410, 100, 200, 131))
        self.daqGroup.setObjectName(_fromUtf8("daqGroup"))
        self.zeroCheck = QtGui.QCheckBox(self.daqGroup)
        self.zeroCheck.setGeometry(QtCore.QRect(10, 110, 80, 22))
        self.zeroCheck.setObjectName(_fromUtf8("zeroCheck"))
        self.speedCombo = QtGui.QComboBox(self.daqGroup)
        self.speedCombo.setGeometry(QtCore.QRect(80, 50, 110, 20))
        self.speedCombo.setObjectName(_fromUtf8("speedCombo"))
        self.speedCombo.addItem(_fromUtf8(""))
        self.speedCombo.addItem(_fromUtf8(""))
        self.speedCombo.addItem(_fromUtf8(""))
        self.speedCombo.addItem(_fromUtf8(""))
        self.label_7 = QtGui.QLabel(self.daqGroup)
        self.label_7.setGeometry(QtCore.QRect(10, 50, 62, 20))
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.averageSpin = QtGui.QSpinBox(self.daqGroup)
        self.averageSpin.setEnabled(True)
        self.averageSpin.setGeometry(QtCore.QRect(130, 80, 60, 20))
        self.averageSpin.setMinimum(0)
        self.averageSpin.setMaximum(100)
        self.averageSpin.setObjectName(_fromUtf8("averageSpin"))
        self.label_8 = QtGui.QLabel(self.daqGroup)
        self.label_8.setGeometry(QtCore.QRect(10, 80, 121, 20))
        self.label_8.setObjectName(_fromUtf8("label_8"))
        self.saveModeCombo = QtGui.QComboBox(self.daqGroup)
        self.saveModeCombo.setGeometry(QtCore.QRect(110, 20, 80, 20))
        self.saveModeCombo.setObjectName(_fromUtf8("saveModeCombo"))
        self.saveModeCombo.addItem(_fromUtf8(""))
        self.saveModeCombo.addItem(_fromUtf8(""))
        self.label_10 = QtGui.QLabel(self.daqGroup)
        self.label_10.setGeometry(QtCore.QRect(10, 20, 90, 20))
        self.label_10.setObjectName(_fromUtf8("label_10"))
        # --- compliance limit ---
        self.complianceGroup = QtGui.QGroupBox(self.centralwidget)
        self.complianceGroup.setGeometry(QtCore.QRect(190, 50, 230, 50))
        self.complianceGroup.setObjectName(_fromUtf8("complianceGroup"))
        self.complianceSpin = QtGui.QSpinBox(self.complianceGroup)
        self.complianceSpin.setGeometry(QtCore.QRect(120, 20, 90, 20))
        self.complianceSpin.setMinimum(1)
        self.complianceSpin.setMaximum(1000)
        self.complianceSpin.setProperty("value", 10)
        self.complianceSpin.setObjectName(_fromUtf8("complianceSpin"))
        self.label_9 = QtGui.QLabel(self.complianceGroup)
        self.label_9.setGeometry(QtCore.QRect(10, 20, 90, 20))
        self.label_9.setObjectName(_fromUtf8("label_9"))
        # --- source mode (voltage/current) ---
        self.modeGroup = QtGui.QGroupBox(self.centralwidget)
        self.modeGroup.setGeometry(QtCore.QRect(10, 50, 170, 50))
        self.modeGroup.setObjectName(_fromUtf8("modeGroup"))
        self.sourceVRadio = QtGui.QRadioButton(self.modeGroup)
        self.sourceVRadio.setEnabled(True)
        self.sourceVRadio.setGeometry(QtCore.QRect(10, 20, 60, 22))
        self.sourceVRadio.setChecked(True)
        self.sourceVRadio.setObjectName(_fromUtf8("sourceVRadio"))
        self.sourceIRadio = QtGui.QRadioButton(self.modeGroup)
        self.sourceIRadio.setEnabled(True)
        self.sourceIRadio.setGeometry(QtCore.QRect(90, 20, 70, 22))
        self.sourceIRadio.setObjectName(_fromUtf8("sourceIRadio"))
        self.progress = QtGui.QProgressBar(self.centralwidget)
        self.progress.setGeometry(QtCore.QRect(10, 430, 510, 30))
        self.progress.setProperty("value", 0)
        self.progress.setInvertedAppearance(False)
        self.progress.setObjectName(_fromUtf8("progress"))
        # --- consecutive-sweep options (checkable group, off by default) ---
        self.sweepContinuallyGroup = QtGui.QGroupBox(self.centralwidget)
        self.sweepContinuallyGroup.setGeometry(QtCore.QRect(10, 230, 390, 70))
        self.sweepContinuallyGroup.setCheckable(True)
        self.sweepContinuallyGroup.setChecked(False)
        self.sweepContinuallyGroup.setObjectName(_fromUtf8("sweepContinuallyGroup"))
        self.label_4 = QtGui.QLabel(self.sweepContinuallyGroup)
        self.label_4.setGeometry(QtCore.QRect(10, 10, 120, 20))
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.scanRecoverySpin = QtGui.QDoubleSpinBox(self.sweepContinuallyGroup)
        self.scanRecoverySpin.setGeometry(QtCore.QRect(130, 10, 81, 20))
        self.scanRecoverySpin.setMaximum(1000000000.0)
        self.scanRecoverySpin.setObjectName(_fromUtf8("scanRecoverySpin"))
        self.label_5 = QtGui.QLabel(self.sweepContinuallyGroup)
        self.label_5.setGeometry(QtCore.QRect(10, 40, 120, 20))
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.nSweepSpin = QtGui.QSpinBox(self.sweepContinuallyGroup)
        self.nSweepSpin.setGeometry(QtCore.QRect(130, 40, 81, 20))
        self.nSweepSpin.setMaximum(99999)
        self.nSweepSpin.setProperty("value", 100)
        self.nSweepSpin.setObjectName(_fromUtf8("nSweepSpin"))
        self.label_11 = QtGui.QLabel(self.sweepContinuallyGroup)
        self.label_11.setGeometry(QtCore.QRect(220, 10, 111, 20))
        self.label_11.setObjectName(_fromUtf8("label_11"))
        self.comboBox = QtGui.QComboBox(self.sweepContinuallyGroup)
        self.comboBox.setEnabled(False)
        self.comboBox.setGeometry(QtCore.QRect(330, 10, 50, 22))
        self.comboBox.setObjectName(_fromUtf8("comboBox"))
        self.comboBox.addItem(_fromUtf8(""))
        self.comboBox.addItem(_fromUtf8(""))
        self.comboBox.addItem(_fromUtf8(""))
        self.comboBox.addItem(_fromUtf8(""))
        self.checkBox = QtGui.QCheckBox(self.sweepContinuallyGroup)
        self.checkBox.setEnabled(False)
        self.checkBox.setGeometry(QtCore.QRect(220, 40, 170, 17))
        self.checkBox.setObjectName(_fromUtf8("checkBox"))
        # --- miscellaneous controls (shutter, output, display blanking) ---
        self.groupBox = QtGui.QGroupBox(self.centralwidget)
        self.groupBox.setGeometry(QtCore.QRect(410, 230, 200, 70))
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.shutterButton = QtGui.QPushButton(self.groupBox)
        self.shutterButton.setEnabled(True)
        self.shutterButton.setGeometry(QtCore.QRect(100, 30, 91, 30))
        self.shutterButton.setObjectName(_fromUtf8("shutterButton"))
        self.displayBlankCheck = QtGui.QCheckBox(self.groupBox)
        self.displayBlankCheck.setGeometry(QtCore.QRect(10, 50, 91, 20))
        self.displayBlankCheck.setObjectName(_fromUtf8("displayBlankCheck"))
        self.outputCheck = QtGui.QCheckBox(self.groupBox)
        self.outputCheck.setGeometry(QtCore.QRect(10, 30, 91, 20))
        self.outputCheck.setObjectName(_fromUtf8("outputCheck"))
        self.livePlotCheck = QtGui.QCheckBox(self.groupBox)
        self.livePlotCheck.setEnabled(False)
        self.livePlotCheck.setGeometry(QtCore.QRect(10, 10, 91, 20))
        self.livePlotCheck.setObjectName(_fromUtf8("livePlotCheck"))
        # --- device area entry ---
        self.groupBox_2 = QtGui.QGroupBox(self.centralwidget)
        self.groupBox_2.setGeometry(QtCore.QRect(460, 0, 150, 50))
        self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
        self.deviceAreaEdit = QtGui.QLineEdit(self.groupBox_2)
        self.deviceAreaEdit.setGeometry(QtCore.QRect(40, 20, 71, 20))
        self.deviceAreaEdit.setObjectName(_fromUtf8("deviceAreaEdit"))
        # --- output directory / file name ---
        self.groupBox_3 = QtGui.QGroupBox(self.centralwidget)
        self.groupBox_3.setGeometry(QtCore.QRect(10, 300, 600, 60))
        self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
        self.dirEdit = QtGui.QLineEdit(self.groupBox_3)
        self.dirEdit.setGeometry(QtCore.QRect(90, 20, 501, 30))
        self.dirEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.dirEdit.setObjectName(_fromUtf8("dirEdit"))
        self.browseButton = QtGui.QPushButton(self.groupBox_3)
        self.browseButton.setGeometry(QtCore.QRect(10, 20, 75, 30))
        self.browseButton.setObjectName(_fromUtf8("browseButton"))
        self.groupBox_4 = QtGui.QGroupBox(self.centralwidget)
        self.groupBox_4.setGeometry(QtCore.QRect(10, 360, 601, 60))
        self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
        self.fileEdit = QtGui.QLineEdit(self.groupBox_4)
        self.fileEdit.setGeometry(QtCore.QRect(10, 20, 580, 30))
        self.fileEdit.setObjectName(_fromUtf8("fileEdit"))
        IVSweeper.setCentralWidget(self.centralwidget)
        # --- menu bar, status bar and actions ---
        self.menubar = QtGui.QMenuBar(IVSweeper)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 622, 21))
        self.menubar.setObjectName(_fromUtf8("menubar"))
        self.menuGreyRules = QtGui.QMenu(self.menubar)
        self.menuGreyRules.setObjectName(_fromUtf8("menuGreyRules"))
        IVSweeper.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(IVSweeper)
        self.statusbar.setObjectName(_fromUtf8("statusbar"))
        IVSweeper.setStatusBar(self.statusbar)
        self.actionQuit = QtGui.QAction(IVSweeper)
        self.actionQuit.setObjectName(_fromUtf8("actionQuit"))
        self.actionRun_Test_Code = QtGui.QAction(IVSweeper)
        self.actionRun_Test_Code.setObjectName(_fromUtf8("actionRun_Test_Code"))
        self.menuGreyRules.addAction(self.actionRun_Test_Code)
        self.menuGreyRules.addAction(self.actionQuit)
        self.menubar.addAction(self.menuGreyRules.menuAction())
        # Apply all translatable strings, then wire the Quit action and
        # auto-connect any on_<object>_<signal> slots by name.
        self.retranslateUi(IVSweeper)
        self.speedCombo.setCurrentIndex(0)
        self.saveModeCombo.setCurrentIndex(1)
        QtCore.QObject.connect(self.actionQuit, QtCore.SIGNAL(_fromUtf8("triggered()")), IVSweeper.close)
        QtCore.QMetaObject.connectSlotsByName(IVSweeper)
    def retranslateUi(self, IVSweeper):
        """Set all user-visible (translatable) strings and tooltips."""
        IVSweeper.setWindowTitle(_translate("IVSweeper", "IV Sweep Dynamics Investigator", None))
        self.addressGroup.setTitle(_translate("IVSweeper", "Instrument Selection", None))
        self.instrumentCombo.setItemText(0, _translate("IVSweeper", "(Re)Scan for Instruments", None))
        self.sweepButton.setText(_translate("IVSweeper", "Start Sweep", None))
        self.terminalsGroup.setTitle(_translate("IVSweeper", "Output Terminals", None))
        self.frontRadio.setToolTip(_translate("IVSweeper", "Use the terminals on the front of the device", None))
        self.frontRadio.setText(_translate("IVSweeper", "Front", None))
        self.rearRadio.setToolTip(_translate("IVSweeper", "Use the terminals on the rear of the device", None))
        self.rearRadio.setText(_translate("IVSweeper", "Rear", None))
        self.wiresGroup.setTitle(_translate("IVSweeper", "Measurement Type", None))
        self.twowireRadio.setToolTip(_translate("IVSweeper", "Measure voltage across the INPUT/OUTPUT terminals", None))
        self.twowireRadio.setText(_translate("IVSweeper", "Two Wire", None))
        self.fourwireRadio.setToolTip(_translate("IVSweeper", "Measure voltage across the 4-WIRE SENSE terminals", None))
        self.fourwireRadio.setText(_translate("IVSweeper", "Four Wire", None))
        self.sweepGroup.setTitle(_translate("IVSweeper", "Sweep Setup", None))
        self.label.setText(_translate("IVSweeper", "Start:", None))
        self.label_2.setText(_translate("IVSweeper", "End:", None))
        self.reverseButton.setText(_translate("IVSweeper", "Reverse Direction", None))
        self.endSpin.setSuffix(_translate("IVSweeper", " mV", None))
        self.startSpin.setSuffix(_translate("IVSweeper", " mV", None))
        self.label_3.setText(_translate("IVSweeper", "Sweep Points:", None))
        self.label_6.setText(_translate("IVSweeper", "Step Delay:", None))
        self.autoAdvance.setToolTip(_translate("IVSweeper", "a robot chooses when to advance to the next sweep step", None))
        self.autoAdvance.setText(_translate("IVSweeper", "Intelligent Advance", None))
        self.deltaStep.setText(_translate("IVSweeper", "Δ=20 mV", None))
        self.maxPowerCheck.setToolTip(_translate("IVSweeper", "<html><head/><body><p>DANGER: This is relatively untested and there\'s no guarentee it won\'t destroy your device. Make sure your compliance is set to prevent destruction.</p></body></html>", None))
        self.maxPowerCheck.setText(_translate("IVSweeper", "Dwell @ Max Power", None))
        self.delaySpinBox.setToolTip(_translate("IVSweeper", "After reaching a new source value in the sweep, this amount of time will pass before advancing to the next value", None))
        self.delaySpinBox.setSuffix(_translate("IVSweeper", " s", None))
        self.totalLabel.setText(_translate("IVSweeper", "tot=2.000s", None))
        self.daqGroup.setTitle(_translate("IVSweeper", "Data Acquisition Settings", None))
        self.zeroCheck.setToolTip(_translate("IVSweeper", "Theoretically improves accuracy at the expense of speed", None))
        self.zeroCheck.setText(_translate("IVSweeper", "Auto Zero", None))
        self.speedCombo.setToolTip(_translate("IVSweeper", "<html><head/><body><p>Set integration period for a single data point</p><p>• FAST — Sets speed to 0.01 PLC and sets display resolution to 3½ digits.</p><p>• MED — Sets speed to 0.10 PLC and sets display resolution to 4½ digits.</p><p>• NORMAL — Sets speed to 1.00 PLC and sets display resolution to 5½ digits.</p><p>• HI ACCURACY — Sets speed to 10.00 PLC and sets display resolution to 6½</p><p>digits.</p></body></html>", None))
        self.speedCombo.setItemText(0, _translate("IVSweeper", "Fast", None))
        self.speedCombo.setItemText(1, _translate("IVSweeper", "Medium", None))
        self.speedCombo.setItemText(2, _translate("IVSweeper", "Normal", None))
        self.speedCombo.setItemText(3, _translate("IVSweeper", "High Accuracy", None))
        self.label_7.setText(_translate("IVSweeper", "Speed:", None))
        self.averageSpin.setToolTip(_translate("IVSweeper", "repeating filter. danger, this *could* smear data between sweep steps, set to zero to disable", None))
        self.label_8.setText(_translate("IVSweeper", "Points to average:", None))
        self.saveModeCombo.setItemText(0, _translate("IVSweeper", "I,V vs t", None))
        self.saveModeCombo.setItemText(1, _translate("IVSweeper", "I vs V", None))
        self.label_10.setText(_translate("IVSweeper", "Mode:", None))
        self.complianceGroup.setTitle(_translate("IVSweeper", "Compliance and Range", None))
        self.complianceSpin.setToolTip(_translate("IVSweeper", "Set this just above the maximum you expect", None))
        self.complianceSpin.setSuffix(_translate("IVSweeper", " mA", None))
        self.label_9.setText(_translate("IVSweeper", "Plus and minus", None))
        self.modeGroup.setTitle(_translate("IVSweeper", "Source", None))
        self.sourceVRadio.setToolTip(_translate("IVSweeper", "Sweep voltage source", None))
        self.sourceVRadio.setText(_translate("IVSweeper", "Voltage", None))
        self.sourceIRadio.setToolTip(_translate("IVSweeper", "Sweep current source", None))
        self.sourceIRadio.setText(_translate("IVSweeper", "Current", None))
        self.sweepContinuallyGroup.setTitle(_translate("IVSweeper", "Consecutive Sweeps", None))
        self.label_4.setText(_translate("IVSweeper", "Delay Between Scans:", None))
        self.scanRecoverySpin.setSuffix(_translate("IVSweeper", " s", None))
        self.label_5.setText(_translate("IVSweeper", "Number of Sweeps:", None))
        self.label_11.setText(_translate("IVSweeper", "Hold Between Scans:", None))
        self.comboBox.setItemText(0, _translate("IVSweeper", "L=∞", None))
        self.comboBox.setItemText(1, _translate("IVSweeper", "Voc", None))
        self.comboBox.setItemText(2, _translate("IVSweeper", "Jsc", None))
        self.comboBox.setItemText(3, _translate("IVSweeper", "Mppt", None))
        self.checkBox.setText(_translate("IVSweeper", "Toggle Shutter Between Scans", None))
        self.groupBox.setTitle(_translate("IVSweeper", "Miscellaneous", None))
        self.shutterButton.setText(_translate("IVSweeper", "Toggle Shutter", None))
        self.displayBlankCheck.setToolTip(_translate("IVSweeper", "This blanks the Keithley\'s display during measurement, allowing for a slight increase in measurement speed", None))
        self.displayBlankCheck.setText(_translate("IVSweeper", "Blank Display", None))
        self.outputCheck.setText(_translate("IVSweeper", "Output On", None))
        self.livePlotCheck.setText(_translate("IVSweeper", "Live Data View", None))
        self.groupBox_2.setTitle(_translate("IVSweeper", "Device Area [cm^2]", None))
        self.deviceAreaEdit.setToolTip(_translate("IVSweeper", "Enter Device Area Here", None))
        self.deviceAreaEdit.setText(_translate("IVSweeper", "0.12", None))
        self.groupBox_3.setTitle(_translate("IVSweeper", "Output Directory", None))
        self.browseButton.setText(_translate("IVSweeper", "Browse", None))
        self.groupBox_4.setTitle(_translate("IVSweeper", "Output File Name", None))
        self.menuGreyRules.setTitle(_translate("IVSweeper", "File", None))
        self.actionQuit.setText(_translate("IVSweeper", "Quit", None))
        self.actionRun_Test_Code.setText(_translate("IVSweeper", "Run Test Code", None))
| mit | 729,008,163,980,921,100 | 63.479784 | 490 | 0.697475 | false | 3.627843 | true | false | false |
all-of-us/raw-data-repository | rdr_service/alembic/versions/6f9266e7a5fb_initial_metrics.py | 1 | 2068 | """Initial metrics
Revision ID: 6f9266e7a5fb
Revises: 51415576d3e9
Create Date: 2017-12-12 10:38:27.166562
"""
import model.utils
import sqlalchemy as sa
from alembic import op
from rdr_service.participant_enums import MetricSetType, MetricsKey
# revision identifiers, used by Alembic.
revision = "6f9266e7a5fb"
down_revision = "51415576d3e9"
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade function (e.g. upgrade_rdr)."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()
def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade function (e.g. downgrade_rdr)."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()
def upgrade_rdr():
    """Upgrade step for the ``rdr`` database: no schema changes in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
def downgrade_rdr():
    """Downgrade step for the ``rdr`` database: no schema changes in this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
def upgrade_metrics():
    """Create the ``metrics.metric_set`` and ``metrics.aggregate_metrics`` tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "metric_set",
        sa.Column("metric_set_id", sa.String(length=50), nullable=False),
        sa.Column("metric_set_type", model.utils.Enum(MetricSetType), nullable=False),
        sa.Column("last_modified", model.utils.UTCDateTime(), nullable=False),
        sa.PrimaryKeyConstraint("metric_set_id"),
        schema="metrics",
    )
    # Aggregate rows are removed automatically when their parent
    # metric_set row is deleted (ON DELETE CASCADE on the FK below).
    op.create_table(
        "aggregate_metrics",
        sa.Column("metric_set_id", sa.String(length=50), nullable=False),
        sa.Column("metrics_key", model.utils.Enum(MetricsKey), nullable=False),
        sa.Column("value", sa.String(length=50), nullable=False),
        sa.Column("count", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["metric_set_id"], ["metrics.metric_set.metric_set_id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("metric_set_id", "metrics_key", "value"),
        schema="metrics",
    )
    # ### end Alembic commands ###
def downgrade_metrics():
    """Drop the tables created by ``upgrade_metrics``."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the child table first so the foreign key from
    # aggregate_metrics to metric_set never dangles.
    op.drop_table("aggregate_metrics", schema="metrics")
    op.drop_table("metric_set", schema="metrics")
    # ### end Alembic commands ###
| bsd-3-clause | 7,656,659,641,091,067,000 | 29.411765 | 109 | 0.657157 | false | 3.571675 | false | false | false |
grevutiu-gabriel/sympy | sympy/ntheory/multinomial.py | 91 | 6870 | from __future__ import print_function, division
from collections import defaultdict
from sympy.core.compatibility import range
def binomial_coefficients(n):
    """Return a dictionary containing pairs :math:`{(k1,k2) : C_kn}` where
    :math:`C_kn` are binomial coefficients and :math:`n=k1+k2`.

    Examples
    ========

    >>> from sympy.ntheory import binomial_coefficients
    >>> binomial_coefficients(9)
    {(0, 9): 1, (1, 8): 9, (2, 7): 36, (3, 6): 84,
     (4, 5): 126, (5, 4): 126, (6, 3): 84, (7, 2): 36, (8, 1): 9, (9, 0): 1}

    See Also
    ========

    binomial_coefficients_list, multinomial_coefficients
    """
    # Walk half of Pascal's row multiplicatively, filling both
    # symmetric keys (k, n-k) and (n-k, k) in one pass.
    table = {(0, n): 1, (n, 0): 1}
    coeff = 1
    for k in range(1, n//2 + 1):
        coeff = (coeff * (n - k + 1))//k
        table[k, n - k] = coeff
        table[n - k, k] = coeff
    return table
def binomial_coefficients_list(n):
    """ Return a list of binomial coefficients as rows of the Pascal's
    triangle.

    Examples
    ========

    >>> from sympy.ntheory import binomial_coefficients_list
    >>> binomial_coefficients_list(9)
    [1, 9, 36, 84, 126, 126, 84, 36, 9, 1]

    See Also
    ========

    binomial_coefficients, multinomial_coefficients
    """
    # Start from a row of ones and overwrite the interior entries,
    # exploiting the symmetry row[k] == row[n-k].
    row = [1] * (n + 1)
    coeff = 1
    for k in range(1, n//2 + 1):
        coeff = (coeff * (n - k + 1))//k
        row[k] = coeff
        row[n - k] = coeff
    return row
def multinomial_coefficients0(m, n, _tuple=tuple, _zip=zip):
    """Return a dictionary containing pairs ``{(k1,k2,..,km) : C_kn}``
    where ``C_kn`` are multinomial coefficients such that
    ``n=k1+k2+..+km``.

    For example:

    >>> from sympy import multinomial_coefficients
    >>> multinomial_coefficients(2, 5) # indirect doctest
    {(0, 5): 1, (1, 4): 5, (2, 3): 10, (3, 2): 10, (4, 1): 5, (5, 0): 1}

    The algorithm is based on the following result:

    Consider a polynomial and its ``n``-th exponent::

      P(x) = sum_{i=0}^m p_i x^i
      P(x)^n = sum_{k=0}^{m n} a(n,k) x^k

    The coefficients ``a(n,k)`` can be computed using the
    J.C.P. Miller Pure Recurrence [see D.E.Knuth, Seminumerical
    Algorithms, The art of Computer Programming v.2, Addison
    Wesley, Reading, 1981;]::

      a(n,k) = 1/(k p_0) sum_{i=1}^m p_i ((n+1)i-k) a(n,k-i),

    where ``a(n,0) = p_0^n``.

    Note: ``_tuple`` and ``_zip`` are local-binding micro-optimizations;
    callers should not pass them.
    """
    # Degenerate cases: zero variables, and the binomial shortcut.
    if not m:
        if n:
            return {}
        return {(): 1}
    if m == 2:
        return binomial_coefficients(n)
    # Unit exponent tuples, one per variable: (1,0,..), (0,1,0,..), ...
    symbols = [(0,)*i + (1,) + (0,)*(m - i - 1) for i in range(m)]
    s0 = symbols[0]
    # Exponent offsets of each variable relative to the first one.
    p0 = [_tuple(aa - bb for aa, bb in _zip(s, s0)) for s in symbols]
    # r maps exponent tuples to coefficients; seeded with a(n, 0).
    r = {_tuple(aa*n for aa in s0): 1}
    # l[k] holds the (exponent tuple, coefficient) items of "degree" k.
    l = [0] * (n*(m - 1) + 1)
    l[0] = r.items()
    for k in range(1, n*(m - 1) + 1):
        d = defaultdict(int)
        # Miller recurrence: combine the previous degrees k-i,
        # weighted by ((n+1)*i - k).
        for i in range(1, min(m, k + 1)):
            nn = (n + 1)*i - k
            if not nn:
                continue
            t = p0[i]
            for t2, c2 in l[k - i]:
                tt = _tuple([aa + bb for aa, bb in _zip(t2, t)])
                d[tt] += nn*c2
                if not d[tt]:
                    # Drop exactly-cancelled terms to keep d sparse.
                    del d[tt]
        # Divide by k (exact by construction of the recurrence).
        r1 = [(t, c//k) for (t, c) in d.items()]
        l[k] = r1
        r.update(r1)
    return r
def multinomial_coefficients(m, n):
    r"""Return a dictionary containing pairs ``{(k1,k2,..,km) : C_kn}``
    where ``C_kn`` are multinomial coefficients such that
    ``n=k1+k2+..+km``.

    For example:

    >>> from sympy.ntheory import multinomial_coefficients
    >>> multinomial_coefficients(2, 5) # indirect doctest
    {(0, 5): 1, (1, 4): 5, (2, 3): 10, (3, 2): 10, (4, 1): 5, (5, 0): 1}

    The algorithm is based on the following result:

    .. math::
        \binom{n}{k_1, \ldots, k_m} =
        \frac{k_1 + 1}{n - k_1} \sum_{i=2}^m \binom{n}{k_1 + 1, \ldots, k_i - 1, \ldots}

    Code contributed to Sage by Yann Laigle-Chapuy, copied with permission
    of the author.

    See Also
    ========

    binomial_coefficients_list, binomial_coefficients
    """
    # Degenerate cases and shortcuts to the cheaper implementations.
    if not m:
        if n:
            return {}
        return {(): 1}
    if m == 2:
        return binomial_coefficients(n)
    if m >= 2*n and n > 1:
        # Many variables, small degree: the iterator variant dedupes
        # coefficients of zero-stripped tuples and is more efficient.
        return dict(multinomial_coefficients_iterator(m, n))
    # Start from (n, 0, ..., 0), whose coefficient is 1.
    t = [n] + [0] * (m - 1)
    r = {tuple(t): 1}
    if n:
        j = 0  # j will be the leftmost nonzero position
    else:
        j = m
    # enumerate tuples in co-lex order
    while j < m - 1:
        # compute next tuple
        tj = t[j]
        if j:
            t[j] = 0
            t[0] = tj
        if tj > 1:
            t[j + 1] += 1
            j = 0
            start = 1
            v = 0
        else:
            j += 1
            start = j + 1
            v = r[tuple(t)]
            t[j] += 1
        # compute the value using the recurrence from the docstring:
        # sum the already-known coefficients of neighbouring tuples.
        # NB: the initialization of v was done above
        for k in range(start, m):
            if t[k]:
                t[k] -= 1
                v += r[tuple(t)]
                t[k] += 1
        t[0] -= 1
        # Division is exact: (v * tj) is a multiple of (n - t[0]).
        r[tuple(t)] = (v * tj) // (n - t[0])
    return r
def multinomial_coefficients_iterator(m, n, _tuple=tuple):
    """multinomial coefficient iterator

    This routine has been optimized for `m` large with respect to `n` by taking
    advantage of the fact that when the monomial tuples `t` are stripped of
    zeros, their coefficient is the same as that of the monomial tuples from
    ``multinomial_coefficients(n, n)``. Therefore, the latter coefficients are
    precomputed to save memory and time.

    >>> from sympy.ntheory.multinomial import multinomial_coefficients
    >>> m53, m33 = multinomial_coefficients(5,3), multinomial_coefficients(3,3)
    >>> m53[(0,0,0,1,2)] == m53[(0,0,1,0,2)] == m53[(1,0,2,0,0)] == m33[(0,1,2)]
    True

    Examples
    ========

    >>> from sympy.ntheory.multinomial import multinomial_coefficients_iterator
    >>> it = multinomial_coefficients_iterator(20,3)
    >>> next(it)
    ((3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), 1)
    """
    if m < 2*n or n == 1:
        # Not enough variables to benefit: fall back to the plain dict.
        mc = multinomial_coefficients(m, n)
        for k, v in mc.items():
            yield(k, v)
    else:
        # Precompute the coefficients keyed by zero-stripped tuples.
        mc = multinomial_coefficients(n, n)
        mc1 = {}
        for k, v in mc.items():
            mc1[_tuple(filter(None, k))] = v
        mc = mc1

        # First tuple in co-lex order: (n, 0, ..., 0).
        t = [n] + [0] * (m - 1)
        t1 = _tuple(t)
        b = _tuple(filter(None, t1))
        yield (t1, mc[b])
        if n:
            j = 0  # j will be the leftmost nonzero position
        else:
            j = m
        # enumerate tuples in co-lex order (same walk as
        # multinomial_coefficients, but looking coefficients up in mc)
        while j < m - 1:
            # compute next tuple
            tj = t[j]
            if j:
                t[j] = 0
                t[0] = tj
            if tj > 1:
                t[j + 1] += 1
                j = 0
            else:
                j += 1
                t[j] += 1
            t[0] -= 1
            t1 = _tuple(t)
            # Strip zeros; the stripped tuple indexes the precomputed table.
            b = _tuple(filter(None, t1))
            yield (t1, mc[b])
| bsd-3-clause | 5,633,455,681,898,301,000 | 27.74477 | 88 | 0.487482 | false | 3.079337 | false | false | false |
wakalixes/sqldataplot | gaussian_error_propagation.py | 1 | 1477 | #--------------------------------------------------
# Revision = $Rev: 13 $
# Date = $Date: 2011-07-31 00:39:24 +0200 (Sun, 31 Jul 2011) $
# Author = $Author: stefan $
#--------------------------------------------------
from uncertainties import ufloat, nominal_value, std_dev
from sympy import *
#class vefloat():
# value = float
# error = float
#
# def __init__(self, value=0.,error=0.):
# self.value = value
# self.error = abs(error)
#
# def __str__(self):
# return "%g+-%g" % (self.value, self.error)
#
# def __repr__(self):
# return "vefloat(%s, %s)" % (self.value, self.error)
#
# def __float__(self):
# return self.value
def gaussian_error(term, *errors):
    """Return the symbolic gaussian error of *term*.

    For each symbol of *term* whose name appears in *errors*, a
    ``sigma_<name>`` symbol is introduced and the squared partial
    derivatives are summed under a square root.
    """
    variance = 0.
    for var in term.atoms(Symbol):
        if var.name not in errors:
            continue
        variance = Add(variance, (term.diff(var)*Symbol("sigma_"+var.name))**2)
    return sqrt(variance)
def gaussian_error_ufloat(term, **vars):
    """Evaluate *term* with uncertainty via symbolic gaussian propagation.

    Keyword arguments map symbol names to ``ufloat`` values.  Returns a
    ``ufloat`` whose nominal value is *term* evaluated at the nominal
    inputs and whose standard deviation is the propagated error
    (evaluated numerically with ``maxprec=10``).
    """
    st = gaussian_error(term, *vars.keys())
    subs = {}
    for k,v in vars.iteritems():
        # Substitute both the symbol itself and its sigma_<name> twin.
        subs[Symbol(k)] = nominal_value(v)
        subs[Symbol("sigma_"+k)] = std_dev(v)
    return ufloat((term.subs(subs),st.n(subs=subs, maxprec=10)))
if __name__ == "__main__":
    # Small self-check: propagate errors through (x^2+y^2)/(x*y) both
    # symbolically (this module) and numerically (uncertainties package).
    fx = ufloat((1,.5))
    fy = ufloat((4,.5))
    print "error propagation analytically"
    x,y = symbols("xy")
    at = (x**2+y**2)/(x*y)
    print gaussian_error(at, "x", "y")
    print gaussian_error_ufloat(at, x=fx, y=fy)
    print "error propagation numerically"
    nt = (fx**2+fy**2)/(fx*fy)
print nt | gpl-2.0 | 4,947,383,098,462,776,000 | 25.392857 | 67 | 0.540961 | false | 2.90748 | false | false | false |
ibmjstart/bluemix-python-sample-twitter-influence-app | app/cloudant-python/cloudant/account.py | 1 | 3959 | from .resource import Resource
from .database import Database
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
class Account(Resource):
"""
An account to a Cloudant or CouchDB account.
# connects to http://localhost:5984
# if a string is passed, connects to %s.cloudant.com
account = cloudant.Account()
response = account.login(USERNAME, PASSWORD)
print response.json()
# { "ok": True, ... }
Like all Cloudant-Python objects, pass `async=True`
to make asynchronous requests, like this:
account = cloudant.Account(async=True)
future = account.login(USERNAME, PASSWORD)
response = future.result()
print response.json()
# { "ok": True, ... }
Although you can use `login` to request a cookie,
you can also set `account._session.auth` to make Cloudant-Python
use those credentials on every request, like this:
account = cloudant.Account()
account._session.auth = (username, password)
"""
def __init__(self, uri="http://localhost:5984", **kwargs):
if not urlparse.urlparse(uri).scheme:
uri = "https://%s.cloudant.com" % uri
super(Account, self).__init__(uri, **kwargs)
def database(self, name, **kwargs):
"""Create a `Database` object prefixed with this account's URL."""
opts = dict(self.opts, **kwargs)
return Database(self._make_url(name), session=self._session, **opts)
def __getitem__(self, name):
"""Shortcut to `Account.database`."""
return self.database(name, **self.opts)
def __delitem__(self, name):
"""
Delete a database named `name`.
Blocks until the response returns,
and raises an error if the deletion failed.
"""
response = self.database(name, **self.opts).delete()
# block until result if the object is using async
if hasattr(response, 'result'):
response = response.result()
response.raise_for_status()
def session(self, **kwargs):
"""Get current user's authentication and authorization status."""
return self.get(self._reset_path('_session'), **kwargs)
def login(self, username, password, **kwargs):
"""Authenticate the connection via cookie."""
# set headers, body explicitly
headers = {
"Content-Type": "application/x-www-form-urlencoded"
}
data = "name=%s&password=%s" % (username, password)
return self.post(self._reset_path('_session'), headers=headers,
data=data, **kwargs)
def logout(self, **kwargs):
"""De-authenticate the connection's cookie."""
return self.delete(self._reset_path('_session'), **kwargs)
def all_dbs(self, **kwargs):
"""List all databases."""
return self.get('_all_dbs', **kwargs)
def active_tasks(self, **kwargs):
"""List replication, compaction, and indexer tasks currently running."""
return self.get('_active_tasks', **kwargs)
def replicate(self, source, target, opts={}, **kwargs):
"""
Begin a replication job.
`opts` contains replication options such as whether the replication
should create the target (`create_target`) or whether the replication
is continuous (`continuous`).
Note: unless continuous, will not return until the job is finished.
"""
params = {
'source': source,
'target': target
}
params.update(opts)
if 'params' in kwargs:
params.update(kwargs['params'])
del kwargs['params']
return self.post('_replicate', params=params, **kwargs)
def uuids(self, count=1, **kwargs):
"""Generate an arbitrary number of UUIDs."""
params = dict(count=count)
return self.get('_uuids', params=params, **kwargs)
| apache-2.0 | -3,222,664,319,702,423,600 | 33.12931 | 80 | 0.602172 | false | 4.345774 | false | false | false |
daltonsena/eyed3 | examples/tag_example.py | 2 | 6993 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
# Copyright (C) 2012 Travis Shirk <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
################################################################################
from eyed3.id3 import Tag
from eyed3.id3 import ID3_V1_0, ID3_V1_1, ID3_V2_3, ID3_V2_4
import logging
from eyed3 import log
log.setLevel(logging.DEBUG)
t = Tag()
t.artist = u"M.O.P."
t.title = u"How About Some Hardcore"
t.album = u"To The Death"
t.genre = u"Hip-Hop"
t.track_num = (3,5)
t.disc_num = (1,1)
t.original_release_date = "1994-04-07"
t.release_date = "1994-04-07"
t.encoding_date = "2002-03"
t.recording_date = 1996
t.tagging_date = "2012-2-5"
t.comments.set(u"Gritty, yo!")
t.comments.set(u"Brownsville, Brooklyn", u"Origin")
t.user_text_frames.set(u"****", u"Rating")
t.artist_url = b"http://allmusic.com/artist/mop-p194909"
t.user_url_frames.set(b"http://eyed3.nicfit.net/")
t.bpm = 187
t.play_count = 125
t.unique_file_ids.set(b"43e888e067ea107f964916af6259cbe7", "md5sum")
t.cd_id = b"\x3c\x33\x4d\x41\x43\x59\x3c\x33"
t.privates.set("Secrets", "Billy Danzenie")
t.terms_of_use = u"Blunted"
t.lyrics.set(u"""
[ Billy Danzenie ]
How about some hardcore?
(Yeah, we like it raw!) (4x)
How about some hardcore?
[ VERSE 1: Billy Danzenie ]
(Yeah, we like it raw in the streets)
For the fellas on the corner posted up 20 deep
With your ifth on your hip, ready to flip
Whenever you empty your clip, dip, trip your sidekick
You got skill, you best manage to chill
And do yourself a favor, don`t come nowhere near the Hill
With that bullshit, word, money grip, it`ll cost ya
Make you reminisce of Frank Nitty `The Enforcer`
I move with M.O.P.`s Last Generation
Straight up and down, act like you want a confrontation
I packs my gat, I gotta stay strapped
I bust mines, don`t try to sneak up on me from behind
Don`t sleep, I get deep when I creep
I see right now I got to show you it ain`t nothin sweet
Go get your muthaf**kin hammer
And act like you want drama
I send a message to your mama
`Hello, do you know your one son left?
I had license to kill and he had been marked for death
He`s up the Hill in the back of the building with two in the dome
I left him stiffer than a tombstone`
[ Li`l Fame ]
How about some hardcore?
(Yeah, we like it raw!) (4x)
How about some hardcore?
[ VERSE 2: Billy Danzenie ]
(Yeah, we like it rugged in the ghetto)
I used to pack sling shots, but now I`m packin heavy metal
A rugged underground freestyler
Is Li`l Fame, muthaf**ka, slap, Li`l Mallet
When I let off, it`s a burning desire
Niggas increase the peace cause when I release it be rapid fire
For the cause I drop niggas like drawers
Niggas`ll hit the floors from the muthaf**kin .44`s
I`m talkin titles when it`s showtime
f**k around, I have niggas call the injury help line
I bust words in my verse that`ll serve
Even on my first nerve I put herbs to curbs
I ain`t about givin niggas a chance
And I still raise sh*t to make my brother wanna get up and dance
Front, I make it a thrill to kill
Bringin the ruckus, it`s the neighborhood hoods for the Hill that`s real
Me and mics, that`s unlike niggas and dykes
So who wanna skate, cause I`m puttin niggas on ice
Whatever I drop must be rough, rugged and hard more
(Yeah!)
[ Billy Danzenie ]
How about some hardcore?
(Yeah, we like it raw!) (4x)
[ VERSE 3: Billy Danzenie ]
Yo, here I am (So what up?) Get it on, cocksucker
That nigga Bill seem to be a ill black brother
I gets dough from the way I flow
And before I go
You muthaf**kas gonna know
That I ain`t nothin to f**k with - duck quick
I squeeze when I`m stressed
Them teflons`ll tear through your vest
I love a bloodbath (niggas know the half)
You can feel the wrath (Saratoga/St. Marks Ave.)
B-i-l-l-y D-a-n-z-e
n-i-e, me, Billy Danzenie
(Knock, knock) Who`s there? (Li`l Fame)
Li`l Fame who? (Li`l Fame, your nigga)
Boom! Ease up off the trigger
It`s aight, me and shorty go to gunfights
Together we bring the ruckus, right?
We trump tight, aight?
I earned mine, so I`m entitled to a title
(7 f**kin 30) that means I`m homicidal
[ Li`l Fame ]
How about some hardcore?
(Yeah, we like it raw!) (4x)
[ VERSE 4: Li`l Fame ]
Yo, I scream on niggas like a rollercoaster
To them wack muthaf**kas, go hang it up like a poster
Niggas get excited, but don`t excite me
Don`t invite me, I`m splittin niggas` heads where the white be
Try to trash this, this little bastard`ll blast it
Only puttin niggas in comas and caskets
I ain`t a phoney, I put the `mack` in a -roni
I leave you lonely (Yeah, yeah, get on his ass, homie)
Up in your anus, I pack steel that`s stainless
We came to claim this, and Li`l Fame`ll make you famous
I mack hoes, rock shows and stack dough
Cause I`m in effect, knockin muthaf**kas like five-o
I`m catchin other niggas peepin, shit, I ain`t sleepin
I roll deep like a muthaf**kin Puerto-Rican
So when I write my competition looks sadly
For broke-ass niggas I make it happen like Mariah Carey
I got sh*t for niggas that roll bold
Li`l Fame is like a orthopedic shoe, I got mad soul
I`ma kill em before I duck em
Because yo, mother made em, mother had em and muthaf**k em
[ Li`l Fame ]
Knowmsayin?
Li`l Fame up in this muthaf**ka
Givin shoutouts to my man D/R Period
[Name]
Lazy Laz
My man Broke As* Moe
The whole Saratoga Ave.
Youknowmsayin?
Representin for Brooklyn
Most of all my cousin Prince Leroy, Big Mal, rest in peace
[ Billy Danzenie ]
Danzenie up in this muthaf**ka
I`d like to say what`s up to the whole M.O.P.
Brooklyn, period
Them niggas that just don`t give a f**k
[ O.G. Bu-Bang
Bet yo ass, nigga
Hey yo, this muthaf**kin Babyface [Name]
Aka O.G. Bu-Bang
Yo, I wanna say what`s up to the whole muthaf**kin M.O.P. boyyeee
""")
t.save("example-v2_4.id3", version=ID3_V2_4)
t.save("example-v2_3.id3", version=ID3_V2_3)
# Loss of the release date month and day.
# Loss of the comment with description.
t.save("example-v1_1.id3", version=ID3_V1_1)
# Loses what v1.1 loses, and the track #
t.save("example-v1_0.id3", version=ID3_V1_0)
'''
from eyed3.id3.tag import TagTemplate
template = "$artist/"\
"$best_release_date:year - $album/"\
"$artist - $track:num - $title.$file:ext"
print TagTemplate(template).substitute(t, zeropad=True)
'''
| gpl-2.0 | 2,091,011,244,069,950,200 | 33.618812 | 80 | 0.713428 | false | 2.623031 | false | false | false |
kaspermarstal/SimpleElastix | Testing/Unit/Python/sitkGetArrayViewFromImageTest.py | 2 | 6220 | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
from __future__ import print_function
import sys
import unittest
import datetime as dt
import timeit
import SimpleITK as sitk
import numpy as np
sizeX = 4
sizeY = 5
sizeZ = 3
newSimpleITKPixelValueInt32 = -3000
newNumPyElementValueInt32 = 200
class TestNumpySimpleITKMemoryviewInterface(unittest.TestCase):
""" This tests numpy array <-> SimpleITK Image conversion. """
def setUp(self):
pass
def _helper_check_sitk_to_numpy_type(self, sitkType, numpyType):
if sitkType == sitk.sitkUnknown:
return
image = sitk.Image((9, 10), sitkType, 1)
a = sitk.GetArrayViewFromImage(image)
self.assertEqual(numpyType, a.dtype)
self.assertEqual((10, 9), a.shape)
def test_type_to_numpy(self):
"try all sitk pixel types to convert to NumPy array view"
self._helper_check_sitk_to_numpy_type(sitk.sitkUInt8, np.uint8)
self._helper_check_sitk_to_numpy_type(sitk.sitkUInt16, np.uint16)
self._helper_check_sitk_to_numpy_type(sitk.sitkUInt32, np.uint32)
self._helper_check_sitk_to_numpy_type(sitk.sitkUInt64, np.uint64)
self._helper_check_sitk_to_numpy_type(sitk.sitkInt8, np.int8)
self._helper_check_sitk_to_numpy_type(sitk.sitkInt16, np.int16)
self._helper_check_sitk_to_numpy_type(sitk.sitkInt32, np.int32)
self._helper_check_sitk_to_numpy_type(sitk.sitkInt64, np.int64)
self._helper_check_sitk_to_numpy_type(sitk.sitkFloat32, np.float32)
self._helper_check_sitk_to_numpy_type(sitk.sitkFloat64, np.float64)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorUInt8, np.uint8)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorInt8, np.int8)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorUInt16, np.uint16)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorInt16, np.int16)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorUInt32, np.uint32)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorInt32, np.int32)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorUInt64, np.uint64)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorInt64, np.int64)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorFloat32, np.float32)
self._helper_check_sitk_to_numpy_type(sitk.sitkVectorFloat64, np.float64)
def test_to_numpy_and_back(self):
"""Test converting an image to NumPy array view and back"""
img = sitk.GaussianSource( sitk.sitkFloat32, [100,100], sigma=[10]*3, mean=[50,50] )
h = sitk.Hash( img )
img2 = sitk.GetImageFromArray( sitk.GetArrayViewFromImage(img))
self.assertEqual( h, sitk.Hash( img2 ))
def test_vector_image_to_numpy(self):
"""Test converting back and forth between NumPy array view and SimpleITK
images where the SimpleITK image has multiple components and
stored as a VectorImage."""
# Check 2D
img = sitk.PhysicalPointSource(sitk.sitkVectorFloat32, [3,4])
h = sitk.Hash( img )
nda = sitk.GetArrayViewFromImage(img)
self.assertEqual(nda.shape, (4,3,2))
self.assertEqual(nda[0,0].tolist(), [0,0])
self.assertEqual(nda[2,1].tolist(), [1,2])
self.assertEqual(nda[0,:,0].tolist(), [0,1,2])
img2 = sitk.GetImageFromArray(nda, isVector=True)
self.assertEqual(h, sitk.Hash(img2))
# check 3D
img = sitk.PhysicalPointSource(sitk.sitkVectorFloat32, [3,4,5])
h = sitk.Hash(img)
nda = sitk.GetArrayViewFromImage(img)
self.assertEqual(nda.shape, (5,4,3,3))
self.assertEqual(nda[0,0,0].tolist(), [0,0,0])
self.assertEqual(nda[0,0,:,0].tolist(), [0,1,2])
self.assertEqual(nda[0,:,1,1].tolist(), [0,1,2,3])
img2 = sitk.GetImageFromArray(nda)
self.assertEqual(img2.GetSize(), img.GetSize())
self.assertEqual(img2.GetNumberOfComponentsPerPixel(), img.GetNumberOfComponentsPerPixel())
self.assertEqual(h, sitk.Hash(img2))
def test_arrayview_writable(self):
"""Test correct behavior of writablity to the returned array view."""
img = sitk.Image((9, 10), sitk.sitkFloat32, 1)
a = sitk.GetArrayViewFromImage(img)
with self.assertRaises(ValueError):
a.fill(0)
def test_processing_time(self):
"""Check the processing time the conversions from SimpleITK Image
to numpy array (GetArrayViewFromImage) and
numpy memoryview (GetArrayViewFromImage)."""
# Performance test for SimpleITK Image -> NumPy array
img = sitk.GaussianSource(sitk.sitkFloat32, [3000,3000], sigma=[10]*3, mean=[50,50])
print("\nGet NumPy array from 3000x3000 SimpleITK Image")
nparray_time_elapsed = min(timeit.repeat(lambda: sitk.GetArrayFromImage(img), repeat=5, number=1))
print ("Processing time of GetArrayFromImage (Copy operation) :: {0} (us)".format(nparray_time_elapsed*1e6))
npview_time_elapsed = min(timeit.repeat(lambda: sitk.GetArrayViewFromImage(img), repeat=5, number=1))
print ("Processing time of GetArrayViewFromImage (Array view) :: {0} (us)".format(npview_time_elapsed*1e6))
self.assertTrue( nparray_time_elapsed > npview_time_elapsed)
# Performance test for NumPy array -> SimpleITK Image
Big_nparray = np.zeros((3000,3000), dtype=np.int64);
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 3,521,244,001,422,895,000 | 38.871795 | 118 | 0.659164 | false | 3.229491 | true | false | false |
rupran/librarytrader | scripts/parse_collected_uprobes.py | 1 | 5273 | #!/usr/bin/env python3
#
# Copyright 2018, Andreas Ziegler <[email protected]>
#
# This file is part of librarytrader.
#
# librarytrader is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# librarytrader is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with librarytrader. If not, see <http://www.gnu.org/licenses/>.
import collections
import os
import re
import sys
# In order to be able to use librarytrader from git without having installed it,
# add top level directory to PYTHONPATH
sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..'))
from librarytrader.librarystore import LibraryStore
def normalize(path):
return re.sub(r'\W', '_', path[1:])
storepath = sys.argv[1]
collectpath = sys.argv[2]
uprobe_file_path = sys.argv[3]
num_to_path = {}
with open(uprobe_file_path, 'r') as infd:
for line in infd:
line = line.strip()
name, path_and_offset = line.split(' ', 1)
path, offset = path_and_offset.split(':')
num_to_path[name[2:]] = (path, int(offset, 16))
store = LibraryStore()
store.load(storepath)
parsed_mapping = collections.defaultdict(set)
matches_global = 0
matches_local = 0
traced_only_binaries = 0
traced_only_libraries = 0
histo_by_lib_global = collections.defaultdict(int)
histo_by_lib_local = collections.defaultdict(int)
with open(collectpath, 'r') as collectfd:
for line in collectfd:
line = line.strip()
path, offset = num_to_path[line]
lib = store.get(path)
if not lib:
print('ERROR: {} not found!'.format(path))
continue
fnames = lib.exported_addrs[offset]
print(offset, fnames)
if not fnames:
if offset in lib.local_functions:
matches_local += 1
if offset not in lib.local_users or len(lib.local_users[offset]) == 0 \
or set(lib.local_users[offset]) == set(['EXTERNAL']):
print('LOCAL: traced usage but no static user: {}:{}'.format(lib.fullname, hex(offset)))
histo_by_lib_local[lib.fullname] += 1
if ".so" in lib.fullname:
print('{}:{}:{}'.format(lib.fullname, offset, lib.ranges[offset]))
traced_only_libraries += 1
else:
traced_only_binaries += 1
# parsed_mapping[lib.fullname].add('LOCAL_{}'.format(offset))
for name in lib.local_functions[offset]:
parsed_mapping[lib.fullname].add('LOCAL_{}'.format(name))
print('LOCAL_{}'.format(offset), 'name set: {}'.format(lib.local_functions[offset]))
else:
print('no functions for {}:{}'.format(lib.fullname, hex(offset)))
continue
matches_global += 1
if offset not in lib.export_users or len(lib.export_users[offset]) == 0 \
or set(lib.export_users[offset]) == set(['EXTERNAL']):
print('EXPORT: traced usage but no static user: {}:{}'.format(lib.fullname, fnames))
if fnames[0] != '_init' and fnames[0] != '_fini':
histo_by_lib_global[lib.fullname] += 1
if ".so" in lib.fullname:
print('{}:{}:{}'.format(lib.fullname, offset, lib.ranges[offset]))
traced_only_libraries += 1
else:
traced_only_binaries += 1
parsed_mapping[lib.fullname].add(fnames[0])
n_export = 0
n_local = 0
n_lib = 0
for library in store.get_library_objects():
if ".so" in library.fullname:
n_lib += 1
n_export += len(library.exported_addrs)
n_local += len(library.local_functions)
mittel = n_export // n_lib
mittel_local = n_local // n_lib
print('global matches: {}, local matches: {}, traced only: bin {}, lib {}, avg exports {}, local {}, n_lib {}'.format(matches_global,
matches_local, traced_only_binaries, traced_only_libraries, mittel, mittel_local, n_lib))
with open(collectpath + '.matched', 'w') as outfd:
for lib, names in parsed_mapping.items():
for name in names:
outfd.write('{}:{}\n'.format(lib, name))
with open(collectpath + '.missed.local', 'w') as outfd:
for path, num in sorted(histo_by_lib_local.items(), key=lambda x:x[1]):
local_hit_path = len([x for x in parsed_mapping[path] if x.startswith("LOCAL_")])
outfd.write('{}:{}:{}:{}\n'.format(num, len(store[path].local_functions), local_hit_path, path))
with open(collectpath + '.missed.global', 'w') as outfd:
for path, num in sorted(histo_by_lib_global.items(), key=lambda x:x[1]):
global_hit_path = len([x for x in parsed_mapping[path] if not x.startswith("LOCAL_")])
outfd.write('{}:{}:{}:{}\n'.format(num, len(store[path].exported_addrs), global_hit_path, path))
| gpl-3.0 | 1,207,111,483,490,482,700 | 40.849206 | 133 | 0.61483 | false | 3.589517 | false | false | false |
vly/py_ga_lite | setup.py | 1 | 1077 | from distutils.core import setup
setup(
name='GA_lite',
version='0.1.0',
author='Val Lyashov',
author_email='[email protected]',
packages=['ga_lite', 'ga_lite.test'],
url='http://pypi.python.org/pypi/ga_lite/',
license='LICENSE.txt',
description='A lite Google Analytics Data Export API library.',
long_description=open('README.txt').read(),
install_requires=[
'requests >= 0.11.0',
'pysqlite >= 2.0.0'
],
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
),
) | mit | -1,190,108,348,662,882,800 | 32.806452 | 67 | 0.562674 | false | 4.064151 | false | true | false |
garybake/blog | fabfile.py | 1 | 4852 | from fabric.api import *
import fabric.contrib.project as project
import os
import shutil
import sys
import SocketServer
from datetime import datetime
import livereload
from pelican.server import ComplexHTTPRequestHandler
# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path
# Remote server configuration
production = 'root@localhost:22'
dest_path = '/var/www'
# Rackspace Cloud Files configuration settings
env.cloudfiles_username = 'my_rackspace_username'
env.cloudfiles_api_key = 'my_rackspace_api_key'
env.cloudfiles_container = 'my_cloudfiles_container'
# Github Pages configuration
env.github_pages_branch = "gh-pages"
# Port for `serve`
PORT = 8000
def clean():
"""Remove generated files"""
if os.path.isdir(DEPLOY_PATH):
shutil.rmtree(DEPLOY_PATH)
os.makedirs(DEPLOY_PATH)
def build():
"""Build local version of site"""
local('pelican -s pelicanconf.py')
def rebuild():
"""`clean` then `build`"""
clean()
build()
def regenerate():
"""Automatically regenerate site upon file modification"""
local('pelican -r -s pelicanconf.py')
def serve():
"""Serve site at http://localhost:8000/"""
os.chdir(env.deploy_path)
class AddressReuseTCPServer(SocketServer.TCPServer):
allow_reuse_address = True
server = AddressReuseTCPServer(('', PORT), ComplexHTTPRequestHandler)
sys.stderr.write('Serving on port {0} ...\n'.format(PORT))
server.serve_forever()
def reserve():
"""`build`, then `serve`"""
build()
serve()
def preview():
"""Build production version of site"""
local('pelican -s publishconf.py')
def cf_upload():
"""Publish to Rackspace Cloud Files"""
rebuild()
with lcd(DEPLOY_PATH):
local('swift -v -A https://auth.api.rackspacecloud.com/v1.0 '
'-U {cloudfiles_username} '
'-K {cloudfiles_api_key} '
'upload -c {cloudfiles_container} .'.format(**env))
@hosts(production)
def publish():
"""Publish to production via rsync"""
local('pelican -s publishconf.py')
project.rsync_project(
remote_dir=dest_path,
exclude=".DS_Store",
local_dir=DEPLOY_PATH.rstrip('/') + '/',
delete=True,
extra_opts='-c',
)
def gh_pages():
"""Publish to GitHub Pages"""
rebuild()
local("ghp-import -b {github_pages_branch} {deploy_path}".format(**env))
local("git push origin {github_pages_branch}".format(**env))
TEMPLATE_RST = """
{title}
{hashes}
:date: {year}-{month}-{day} {hour}:{minute:02d}
:tags:
:category:
:slug: {slug}
:summary:
:status: draft
"""
# TEMPLATE is declared before hand, and all the necessary imports made
def make_entry_rst(title):
today = datetime.today()
slug = title.lower().strip().replace(' ', '-')
f_create = "content/{}_{:0>2}_{:0>2}_{}.rst".format(
today.year, today.month, today.day, slug)
t = TEMPLATE_RST.strip().format(title=title,
hashes='#' * len(title),
year=today.year,
month=today.month,
day=today.day,
hour=today.hour,
minute=today.minute,
slug=slug)
with open(f_create, 'w') as w:
w.write(t)
print("File created -> " + f_create)
TEMPLATE_MD = """
Title: {title}
Date: {year}-{month}-{day} {hour}:{minute:02d}
Tags: thats, awesome
Category: yeah
Slug: {slug}
"""
# TEMPLATE is declared before hand, and all the necessary imports made
def make_entry(title):
today = datetime.today()
slug = title.lower().strip().replace(' ', '-')
f_create = "content/{}_{:0>2}_{:0>2}_{}.md".format(
today.year, today.month, today.day, slug)
t = TEMPLATE_MD.strip().format(title=title,
hashes='#' * len(title),
year=today.year,
month=today.month,
day=today.day,
hour=today.hour,
minute=today.minute,
slug=slug)
with open(f_create, 'w') as w:
w.write(t)
print("File created -> " + f_create)
def live_build(port=8080):
local('make clean') # 1
local('make html') # 2
os.chdir('output') # 3
server = livereload.Server() # 4
server.watch('../content/*.rst', # 5
livereload.shell('pelican -s ../pelicanconf.py -o ../output')) # 6
server.watch('../naffy/', # 7
livereload.shell('pelican -s ../pelicanconf.py -o ../output')) # 8
server.watch('*.html') # 9
server.watch('*.css') # 10
server.serve(liveport=35729, port=port) # 11 | mit | 2,321,050,343,797,999,600 | 27.547059 | 76 | 0.58141 | false | 3.642643 | false | false | false |
Balannen/LSMASOMM | atom3/Kernel/TypeModels/TypesMetaModel_MM.py | 1 | 12254 | from ASG_TypesMetaModel import *
from graph_ASG_ERmetaMetaModel import *
from Tkinter import *
from ATOM3TypeInfo import *
from ATOM3String import *
from StatusBar import *
from ATOM3TypeDialog import *
from TypeName import *
from LeafType import *
from ModelType import *
from Operator import *
def createNewASGroot(self):
    """Return a fresh abstract-syntax-graph root for the TypesMetaModel formalism."""
    rootNode = ASG_TypesMetaModel(self, None)
    return rootNode
def createModelMenu(self, modelMenu):
    """Build the formalism-specific "Model" menu with one entry per node type.

    NOTE(review): the incoming *modelMenu* argument is immediately shadowed
    by a freshly created ``Menu``, so whatever the caller passed in is never
    used.  Previously the new menu was also never returned, leaving it
    unreachable; it is now returned so the caller can attach it (callers
    that ignore the return value are unaffected).

    Each entry delegates to the corresponding ``newModes*`` handler on this
    object; ``x=self`` binds the handler target at definition time.
    """
    modelMenu = Menu(self.mmtoolMenu, tearoff=0)
    modelMenu.add_command(label="new TypeName", command=lambda x=self: x.newModesTypeName(x) )
    modelMenu.add_command(label="new LeafType", command=lambda x=self: x.newModesLeafType(x) )
    modelMenu.add_command(label="new ModelType", command=lambda x=self: x.newModesModelType(x) )
    modelMenu.add_command(label="new Operator", command=lambda x=self: x.newModesOperator(x) )
    return modelMenu
def setConnectivity(self):
    """Populate the connectivity and cardinality tables of the formalism.

    ``ConnectivityMap[src][dst]`` lists the link types that may be drawn
    from *src* to *dst*; only a TypeName may be connected (through an
    Operator link) to TypeName, LeafType or ModelType nodes.
    ``CardinalityTable[kind][other]`` holds ``(min, max, role)`` triples
    restricting how many links of each kind a node may participate in.
    """
    kinds = ('LeafType', 'TypeName', 'ModelType', 'Operator')

    def empty_row():
        # One *fresh* empty list per cell: individual cells may be mutated
        # later, so they must not share a single list object.
        return dict([(kind, []) for kind in kinds])

    # --- connectivity -----------------------------------------------------
    typeNameRow = empty_row()
    for target in ('LeafType', 'TypeName', 'ModelType'):
        typeNameRow[target] = [('Operator', self.createNewOperator)]
    self.ConnectivityMap['LeafType'] = empty_row()
    self.ConnectivityMap['TypeName'] = typeNameRow
    self.ConnectivityMap['ModelType'] = empty_row()
    self.ConnectivityMap['Operator'] = empty_row()

    # --- cardinalities ----------------------------------------------------
    row = empty_row()
    row['Operator'] = [('1', '1', 'Source'), ('0', 'N', 'Destination')]
    self.CardinalityTable['TypeName'] = row

    # LeafType and ModelType share the same constraint shape (but not the
    # same list objects).
    for kind in ('LeafType', 'ModelType'):
        row = empty_row()
        row['Operator'] = [('1', '1', 'Destination')]
        self.CardinalityTable[kind] = row

    row = empty_row()
    row['TypeName'] = [('0', '1', 'Destination'), ('0', '1', 'Source')]
    row['LeafType'] = [('0', 'N', 'Source')]
    row['ModelType'] = [('0', 'N', 'Source')]
    row['Operator'] = [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
    self.CardinalityTable['Operator'] = row

    self.entitiesInMetaModel['TypesMetaModel'] = ["TypeName", "LeafType", "ModelType", "Operator"]
def createNewTypeName(self, wherex, wherey, screenCoordinates = 1):
    """Create a TypeName node plus its graphical widget at the given position.

    *wherex*/*wherey* are screen coordinates unless *screenCoordinates* is
    falsy, in which case they are taken as canvas coordinates.  Returns the
    new semantic object, or None when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None

    def abort(reason):
        # Report the violated constraint and drop back to idle mode.
        self.constraintViolation(reason)
        self.mode = self.IDLEMODE

    # Global formalism constraints must hold before anything is created.
    problem = self.ASGroot.preCondition(ASG.CREATE)
    if problem:
        abort(problem)
        return
    node = TypeName(self)
    # Suffix the default keyword with the running count to keep names unique.
    count = len(self.ASGroot.listNodes["TypeName"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    if screenCoordinates:
        x, y = self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey)
    else:  # already in canvas coordinates
        x, y = wherex, wherey
    widget = graph_TypeName(x, y, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("TypeName", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: first the global graph, then the node itself.
    problem = self.ASGroot.postCondition(ASG.CREATE)
    if problem:
        abort(problem)
        return
    problem = node.postCondition(ASGNode.CREATE)
    if problem:
        abort(problem)
        return
    self.mode = self.IDLEMODE
    event_kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(event_kind, StatusBar.CREATE)
    return node
def createNewLeafType(self, wherex, wherey, screenCoordinates = 1):
    """Create a LeafType node plus its graphical widget at the given position.

    *wherex*/*wherey* are screen coordinates unless *screenCoordinates* is
    falsy, in which case they are taken as canvas coordinates.  Returns the
    new semantic object, or None when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None

    def abort(reason):
        # Report the violated constraint and drop back to idle mode.
        self.constraintViolation(reason)
        self.mode = self.IDLEMODE

    # Global formalism constraints must hold before anything is created.
    problem = self.ASGroot.preCondition(ASG.CREATE)
    if problem:
        abort(problem)
        return
    node = LeafType(self)
    # Suffix the default keyword with the running count to keep names unique.
    count = len(self.ASGroot.listNodes["LeafType"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    if screenCoordinates:
        x, y = self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey)
    else:  # already in canvas coordinates
        x, y = wherex, wherey
    widget = graph_LeafType(x, y, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("LeafType", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: first the global graph, then the node itself.
    problem = self.ASGroot.postCondition(ASG.CREATE)
    if problem:
        abort(problem)
        return
    problem = node.postCondition(ASGNode.CREATE)
    if problem:
        abort(problem)
        return
    self.mode = self.IDLEMODE
    event_kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(event_kind, StatusBar.CREATE)
    return node
def createNewModelType(self, wherex, wherey, screenCoordinates = 1):
    """Create a ModelType node plus its graphical widget at the given position.

    *wherex*/*wherey* are screen coordinates unless *screenCoordinates* is
    falsy, in which case they are taken as canvas coordinates.  Returns the
    new semantic object, or None when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None

    def abort(reason):
        # Report the violated constraint and drop back to idle mode.
        self.constraintViolation(reason)
        self.mode = self.IDLEMODE

    # Global formalism constraints must hold before anything is created.
    problem = self.ASGroot.preCondition(ASG.CREATE)
    if problem:
        abort(problem)
        return
    node = ModelType(self)
    # Suffix the default keyword with the running count to keep names unique.
    count = len(self.ASGroot.listNodes["ModelType"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    if screenCoordinates:
        x, y = self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey)
    else:  # already in canvas coordinates
        x, y = wherex, wherey
    widget = graph_ModelType(x, y, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ModelType", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: first the global graph, then the node itself.
    problem = self.ASGroot.postCondition(ASG.CREATE)
    if problem:
        abort(problem)
        return
    problem = node.postCondition(ASGNode.CREATE)
    if problem:
        abort(problem)
        return
    self.mode = self.IDLEMODE
    event_kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(event_kind, StatusBar.CREATE)
    return node
def createNewOperator(self, wherex, wherey, screenCoordinates = 1):
self.fromClass = None
self.toClass = None
# try the global constraints...
res = self.ASGroot.preCondition(ASG.CREATE)
if res:
self.constraintViolation(res)
self.mode=self.IDLEMODE
return
new_semantic_obj = Operator(self)
ne = len(self.ASGroot.listNodes["Operator"])
if new_semantic_obj.keyword_:
new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString()+str(ne))
if screenCoordinates:
new_obj = graph_Operator(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
else: # already in canvas coordinates
new_obj = graph_Operator(wherex, wherey, new_semantic_obj)
new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
self.UMLmodel.addtag_withtag("Operator", new_obj.tag)
new_semantic_obj.graphObject_ = new_obj
self.ASGroot.addNode(new_semantic_obj)
res = self.ASGroot.postCondition(ASG.CREATE)
if res:
self.constraintViolation(res)
self.mode=self.IDLEMODE
return
res = new_semantic_obj.postCondition(ASGNode.CREATE)
if res:
self.constraintViolation(res)
self.mode=self.IDLEMODE
return
self.mode=self.IDLEMODE
if self.editGGLabel :
self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
else:
self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
return new_semantic_obj
def createNew_Model(self, wherex, wherey, screenCoordinates = 1):
self.toClass = None
self.fromClass = None
new_semantic_obj = ASG_TypesMetaModel(self)
ne = len(self.ASGroot.listNodes["ASG_TypesMetaModel"])
if new_semantic_obj.keyword_:
new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString()+str(ne))
if screenCoordinates:
new_obj = graph_ASG_ERmetaMetaModel(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
else: # already in canvas coordinates
new_obj = graph_ASG_ERmetaMetaModel(wherex, wherey, new_semantic_obj)
new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
self.UMLmodel.addtag_withtag("ASG_TypesMetaModel", new_obj.tag)
new_semantic_obj.graphObject_ = new_obj
self.ASGroot.addNode(new_semantic_obj)
self.mode=self.IDLEMODE
if self.editGGLabel :
self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
else:
self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
return new_semantic_obj
def fillTypesInformation(self):
objs = []
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("String", "ATOM3String", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
param = ATOM3String("None")
params.append(param)
param = ATOM3String("None")
params.append(param)
param = ATOM3String("1")
params.append(param)
obj.setValue(("Boolean", "ATOM3Boolean", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("Integer", "ATOM3Integer", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("Float", "ATOM3Float", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
param = ATOM3String("self.types")
params.append(param)
obj.setValue(("Attribute", "ATOM3Attribute", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
param = ATOM3String("[1,1,1,self.types]")
params.append(param)
param = ATOM3String("ATOM3Attribute")
params.append(param)
param = ATOM3String("self.types")
params.append(param)
obj.setValue(("List", "ATOM3List", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
param = ATOM3String("[]")
params.append(param)
param = ATOM3String("1")
params.append(param)
param = ATOM3String("1")
params.append(param)
obj.setValue(("Enum", "ATOM3Enum", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("Constraint", "ATOM3Constraint", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
param = ATOM3String("'class0'")
params.append(param)
param = ATOM3String("None")
params.append(param)
obj.setValue(("Appearance", "ATOM3Appearance", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("BottomType", "ATOM3BottomType", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("Link", "ATOM3Link", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("Port", "ATOM3Port", params, (None, 0) ))
objs.append(obj)
obj = ATOM3TypeInfo(self)
params = []
obj.setValue(("Connection", "ATOM3Connection", params, (None, 0) ))
objs.append(obj)
self.typeList.setValue(objs)
| gpl-3.0 | -6,969,317,930,008,179,000 | 35.246201 | 121 | 0.644524 | false | 3.345345 | false | false | false |
josephdunn/stackpy | stackpy/url.py | 1 | 6161 | from json import loads
from urllib import quote, urlencode
from urllib2 import urlopen, HTTPError
from zlib import decompress, MAX_WBITS
import api
from database import Database
from filter import Filter
## Represents an error that occurred while accessing the API.
class APIError(Exception):
## Constructs the exception object.
def __init__(self, error_id, error_message):
self._error_id = error_id
self._error_message = error_message
## Returns a string representation of the exception
# @return the error string
def __str__(self):
return 'API error %d: %s.' % (self._error_id, self._error_message,)
## Returns the unique ID for the error.
# @return the unique ID
def error_id(self):
return self._error_id
## Represents a %URL for accessing an API resource.
#
# The URL class provides methods for manipulating a %URL that will eventually be
# used to access an API method. There is rarely a need to interact with this
# class directly - instead use the methods of API and Site.
class URL:
## Constructs a URL object optionally initialized to a domain.
# @param domain a site domain name
def __init__(self, domain=None):
self._prefix = 'http'
self._method = 'GET'
self._methods = []
self._base_methods = []
self._parameters = {}
# Add two default parameters to accompany each request
self._parameters['key'] = api.API.key
self._parameters['filter'] = Filter.default
if not domain is None:
self._parameters['site'] = domain
self._ttl = 600 # cache data for 10 minutes by default
## Returns an internal representation of the URL.
# @return the internal representation
def __repr__(self):
return "<%s request for '%s'>" % (self._method,
'/'.join(self._methods),)
## Constructs the string representation of the URL.
# @return the complete URL as a string
def __str__(self):
return '%s://api.stackexchange.com/2.1/%s%s' % (self._prefix,
'/'.join(self._methods),
'?' + urlencode(self._parameters) if self._method == 'GET' else '',)
## Retrieves the JSON data for the provided URL.
# @param forbid_empty raises an error if fewer than one item is returned
# @returns the JSON response
#
# This method will generate the URL for the request and either retrieve the
# JSON for that URL or return the latest value from the cache.
def fetch(self, forbid_empty=False):
url = str(self)
# If caching is enabled for this URL, check the cache
if self._ttl:
Database.prepare()
json_data = Database.current.retrieve_from_cache(url)
if not json_data is None:
return loads(json_data)
# Catch any HTTP errors because we want to grab error messages
try:
post_data = urlencode(self._parameters) if self._method == 'POST' else None
raw_data = urlopen(url, data=post_data).read()
except HTTPError, e:
raw_data = e.read()
json_data = decompress(raw_data, 16 + MAX_WBITS).decode('UTF-8')
data = loads(json_data)
# Check the data for errors
if 'error_id' in data and 'error_message' in data:
raise APIError(data['error_id'], data['error_message'])
if not 'items' in data:
raise KeyError('"items" missing from server response.')
# Add it to the cache for next time
if self._ttl:
Database.current.add_to_cache(url, json_data, self._ttl)
# If the caller wants at least one item, make sure there is
if forbid_empty and not len(data['items']):
raise IndexError('"items" is empty but at least one item was expected.')
return data
## Adds a method to the end of the URL.
# @param method the name of the method
# @param is_variable whether this 'method' can vary between requests
#
# A bit of an explanation for this method seems in order. The `is_variable`
# parameter indicates whether this particular part of the method is
# constant or if it represents an ID or tag or some other variant.
def add_method(self, method, is_variable=False):
self._methods.append(quote(method, ''))
self._base_methods.append('*' if is_variable else method)
return self
## Adds a query string parameter to the URL.
# @param name the name of the parameter
# @param value the value for the parameter
#
# Note: if a parameter with the same name already exists, it will be replaced. Also,
# if name is set to 'access_token', then the URL will switch to HTTPS.
def add_parameter(self, name, value):
self._parameters[name] = str(value)
if name == 'access_token':
self.secure()
return self
## Returns the base method used for the request.
# @return the base method
#
# The return value of this method is used extensively in the meta type
# system Stack.PY employs as well as observing the rate limit.
def base_method(self):
return '/'.join(self._base_methods)
## Enables the secure HTTP protocol (HTTPS) for the URL.
def secure(self):
self._prefix = 'https'
return self
## Sets the Time-To-Live (TTL) for this request.
# @param ttl the TTL value for the URL
#
# Note: passing a value of 0 for ttl will result in caching being disabled for the URL
def set_ttl(self, ttl):
self._ttl = ttl
return self
## Switches the URL to a POST request instead of a GET request.
#
# Note: this will disable caching
def switch_to_post(self):
self._method = 'POST'
self._ttl = 0
return self
| mit | 377,028,218,894,038,200 | 39.348993 | 124 | 0.59828 | false | 4.366407 | false | false | false |
nerdoc/bp_mgmt | bp_mgmt/urls.py | 1 | 1317 | """bp_mgmt URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import login
from bp_setup.forms import BPAuthenticationForm
admin.site.site_header = 'Administration: BP Allgemeinmedizin'
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^setup/', include('bp_setup.urls', namespace='bp_setup')),
url(r'^info/', include('django.contrib.flatpages.urls')),
url(r'^api/', include('bp_cupid.api')),
url(r'^/?', include('bp_cupid.urls', namespace='bp_cupid')),
url(r'^login/$', login,
{'authentication_form': BPAuthenticationForm},
name='login',
),
url('^', include('django.contrib.auth.urls')),
]
| agpl-3.0 | -8,803,623,451,032,898,000 | 36.628571 | 77 | 0.680334 | false | 3.403101 | false | false | false |
woutdenolf/spectrocrunch | spectrocrunch/pipelines/run.py | 1 | 1620 | # -*- coding: utf-8 -*-
import logging
import multiprocessing
import time
from ..utils import timing
logger = logging.getLogger(__name__)
def run_sequential(tasks, name=None):
"""
Args:
tasks(list(NXtask))
Returns:
bool
"""
with timing.timeit_logger(logger, name=name):
for task in tasks:
task.run()
if not task.done:
return False
return True
def run_task(task, result_queue):
"""
Args:
task(NXtask)
result_queue(Queue)
"""
try:
task.run()
finally:
result_queue.put(task.done)
def run_parallel(tasks, name, nproc=2):
"""
Args:
tasks(list(NXtask))
Returns:
bool
"""
with timing.timeit_logger(logger, name=name):
result_queue = multiprocessing.Queue()
results = []
with multiprocessing.Pool(nproc) as pool:
try:
while tasks:
tasks_copy = list(tasks)
tasks = []
for task in tasks_copy:
if task.ready_to_run:
results.append(
pool.apply_async(run_task, task, result_queue)
)
else:
tasks.append(task)
if not result_queue.get():
for result in results:
result.wait()
return
finally:
for result in results:
result.wait()
| mit | 5,956,943,999,785,022,000 | 23.545455 | 78 | 0.455556 | false | 4.682081 | false | false | false |
jayceyxc/hue | desktop/core/ext-py/Django-1.6.10/tests/custom_managers/models.py | 49 | 2068 | """
23. Giving models a custom manager
You can use a custom ``Manager`` in a particular model by extending the base
``Manager`` class and instantiating your custom ``Manager`` in your model.
There are two reasons you might want to customize a ``Manager``: to add extra
``Manager`` methods, and/or to modify the initial ``QuerySet`` the ``Manager``
returns.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# An example of a custom manager called "objects".
class PersonManager(models.Manager):
def get_fun_people(self):
return self.filter(fun=True)
@python_2_unicode_compatible
class Person(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
fun = models.BooleanField(default=False)
objects = PersonManager()
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
# An example of a custom manager that sets get_queryset().
class PublishedBookManager(models.Manager):
def get_queryset(self):
return super(PublishedBookManager, self).get_queryset().filter(is_published=True)
@python_2_unicode_compatible
class Book(models.Model):
title = models.CharField(max_length=50)
author = models.CharField(max_length=30)
is_published = models.BooleanField(default=False)
published_objects = PublishedBookManager()
authors = models.ManyToManyField(Person, related_name='books')
def __str__(self):
return self.title
# An example of providing multiple custom managers.
class FastCarManager(models.Manager):
def get_queryset(self):
return super(FastCarManager, self).get_queryset().filter(top_speed__gt=150)
@python_2_unicode_compatible
class Car(models.Model):
name = models.CharField(max_length=10)
mileage = models.IntegerField()
top_speed = models.IntegerField(help_text="In miles per hour.")
cars = models.Manager()
fast_cars = FastCarManager()
def __str__(self):
return self.name
| apache-2.0 | 6,083,510,562,268,266,000 | 30.815385 | 89 | 0.715667 | false | 3.739602 | false | false | false |
openstack/heat | heat/common/crypt.py | 1 | 6949 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import sys
from cryptography import fernet
from cryptography.hazmat import backends
from cryptography.hazmat.primitives.ciphers import algorithms
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers import modes
from cryptography.hazmat.primitives import padding
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from heat.common import exception
from heat.common.i18n import _
auth_opts = [
cfg.StrOpt('auth_encryption_key',
secret=True,
default='notgood but just long enough i t',
help=_('Key used to encrypt authentication info in the '
'database. Length of this key must be 32 characters.'))
]
cfg.CONF.register_opts(auth_opts)
class SymmetricCrypto(object):
"""Symmetric Key Crypto object.
This class creates a Symmetric Key Crypto object that can be used
to decrypt arbitrary data.
Note: This is a reimplementation of the decryption algorithm from
oslo-incubator, and is provided for backward compatibility. Once we have a
DB migration script available to re-encrypt using new encryption method as
part of upgrade, this can be removed.
:param enctype: Encryption Cipher name (default: AES)
"""
def __init__(self, enctype='AES'):
self.algo = algorithms.AES
def decrypt(self, key, msg, b64decode=True):
"""Decrypts the provided ciphertext.
The ciphertext can be optionally base64 encoded.
Uses AES-128-CBC with an IV by default.
:param key: The Encryption key.
:param msg: the ciphetext, the first block is the IV
:returns: the plaintext message, after padding is removed.
"""
key = str.encode(get_valid_encryption_key(key))
if b64decode:
msg = base64.b64decode(msg)
algo = self.algo(key)
block_size_bytes = algo.block_size // 8
iv = msg[:block_size_bytes]
backend = backends.default_backend()
cipher = Cipher(algo, modes.CBC(iv), backend=backend)
decryptor = cipher.decryptor()
padded = (decryptor.update(msg[block_size_bytes:]) +
decryptor.finalize())
unpadder = padding.ANSIX923(algo.block_size).unpadder()
plain = unpadder.update(padded) + unpadder.finalize()
# The original padding algorithm was a slight variation on ANSI X.923,
# where the size of the padding did not include the byte that tells
# you the size of the padding. Therefore, we need to remove one extra
# byte (which will be 0x00) when unpadding.
return plain[:-1]
def encrypt(value, encryption_key=None):
if value is None:
return None, None
encryption_key = get_valid_encryption_key(encryption_key, fix_length=True)
encoded_key = base64.b64encode(encryption_key.encode('utf-8'))
sym = fernet.Fernet(encoded_key)
res = sym.encrypt(encodeutils.safe_encode(value))
return 'cryptography_decrypt_v1', encodeutils.safe_decode(res)
def decrypt(method, data, encryption_key=None):
if method is None or data is None:
return None
decryptor = getattr(sys.modules[__name__], method)
value = decryptor(data, encryption_key)
if value is not None:
return encodeutils.safe_decode(value, 'utf-8')
def encrypted_dict(data, encryption_key=None):
'Return an encrypted dict. Values converted to json before encrypted'
return_data = {}
if not data:
return return_data
for prop_name, prop_value in data.items():
prop_string = jsonutils.dumps(prop_value)
encrypted_value = encrypt(prop_string, encryption_key)
return_data[prop_name] = encrypted_value
return return_data
def decrypted_dict(data, encryption_key=None):
'Return a decrypted dict. Assume input values are encrypted json fields.'
return_data = {}
if not data:
return return_data
for prop_name, prop_value in data.items():
method, value = prop_value
try:
decrypted_value = decrypt(method, value, encryption_key)
except UnicodeDecodeError:
# The dict contained valid JSON on the way in, so if what comes
# out is garbage then the key was incorrect.
raise exception.InvalidEncryptionKey()
prop_string = jsonutils.loads(decrypted_value)
return_data[prop_name] = prop_string
return return_data
def oslo_decrypt_v1(value, encryption_key=None):
encryption_key = get_valid_encryption_key(encryption_key)
sym = SymmetricCrypto()
return sym.decrypt(encryption_key, value, b64decode=True)
def cryptography_decrypt_v1(value, encryption_key=None):
encryption_key = get_valid_encryption_key(encryption_key, fix_length=True)
encoded_key = base64.b64encode(encryption_key.encode('utf-8'))
sym = fernet.Fernet(encoded_key)
try:
return sym.decrypt(encodeutils.safe_encode(value))
except fernet.InvalidToken:
raise exception.InvalidEncryptionKey()
def get_valid_encryption_key(encryption_key, fix_length=False):
if encryption_key is None:
encryption_key = cfg.CONF.auth_encryption_key
if fix_length and len(encryption_key) < 32:
# Backward compatible size
encryption_key = encryption_key * 2
return encryption_key[:32]
def heat_decrypt(value, encryption_key=None):
"""Decrypt data that has been encrypted using an older version of Heat.
Note: the encrypt function returns the function that is needed to
decrypt the data. The database then stores this. When the data is
then retrieved (potentially by a later version of Heat) the decrypt
function must still exist. So whilst it may seem that this function
is not referenced, it will be referenced from the database.
"""
encryption_key = str.encode(get_valid_encryption_key(encryption_key))
auth = base64.b64decode(value)
AES = algorithms.AES(encryption_key)
block_size_bytes = AES.block_size // 8
iv = auth[:block_size_bytes]
backend = backends.default_backend()
cipher = Cipher(AES, modes.CFB(iv), backend=backend)
decryptor = cipher.decryptor()
return decryptor.update(auth[block_size_bytes:]) + decryptor.finalize()
def list_opts():
yield None, auth_opts
| apache-2.0 | 6,933,252,646,719,187,000 | 36.160428 | 78 | 0.691466 | false | 3.964062 | false | false | false |
rothadamg/UPSITE | Utils/InteractionXML/DeleteElements.py | 2 | 4537 | import sys, os, copy
thisPath = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.abspath(os.path.join(thisPath,"..")))
sys.path.append(os.path.abspath(os.path.join(thisPath,"../..")))
import Utils.ElementTreeUtils as ETUtils
from collections import defaultdict
import types
def getEmptyCorpus(xml, deletionRules=None, removeNames=False):
"""
A convenience function for getting an empty corpus, useful for testing for information leaks
in the event extraction process.
"""
if type(xml) in types.StringTypes:
# XML is read from disk, so it's a new copy and can be safely modified
xml = ETUtils.ETFromObj(xml)
else:
# XML is already an object in memory. To prevent problems with other users of it, a copy
# is created before deleting elements.
xml = copy.deepcopy(xml)
if deletionRules == None: # use default rules for BioNLP Shared Task
# We remove all interactions, and all entities that are not named entities. This leaves only
# the gold standard protein/gene names
if removeNames:
deletionRules = {"interaction":{},"entity":{}}
else:
deletionRules = {"interaction":{},"entity":{"given":(None, "False")}}
# Remove elements and return the emptied XML
return processCorpus(xml, None, deletionRules)
def removeElements(parent, rules, reverse, countsByType):
toRemove = []
for element in parent:
attrType = {}
if element.tag in rules:
remove = True
if rules[element.tag] != None and len(rules[element.tag]) > 0:
for attrName in rules[element.tag]:
if element.get(attrName) not in rules[element.tag][attrName]:
remove = False
break
else:
if attrName not in attrType:
attrType[attrName] = set()
attrType[attrName].add(element.get(attrName))
if reverse:
remove = not remove
if remove:
toRemove.append(element)
countsByType[element.tag + " " + str(attrType)] += 1
else:
removeElements(element, rules, reverse, countsByType)
for element in toRemove:
parent.remove(element)
def processCorpus(inputFilename, outputFilename, rules, reverse=False):
print >> sys.stderr, "Deleting elements, rules =", rules
print >> sys.stderr, "Loading corpus file", inputFilename
corpusTree = ETUtils.ETFromObj(inputFilename)
corpusRoot = corpusTree.getroot()
countsByType = defaultdict(int)
removeElements(corpusRoot, rules, reverse, countsByType)
print >> sys.stderr, "Deleted elements"
for k in sorted(countsByType.keys()):
print >> sys.stderr, " " + k + ":", countsByType[k]
if outputFilename != None:
print >> sys.stderr, "Writing output to", outputFilename
ETUtils.write(corpusRoot, outputFilename)
return corpusTree
if __name__=="__main__":
print >> sys.stderr, "##### Delete Elements #####"
from optparse import OptionParser
# Import Psyco if available
try:
import psyco
psyco.full()
print >> sys.stderr, "Found Psyco, using"
except ImportError:
print >> sys.stderr, "Psyco not installed"
optparser = OptionParser(usage="%prog [options]\nPath generator.")
optparser.add_option("-i", "--input", default=None, dest="input", help="Corpus in interaction xml format", metavar="FILE")
optparser.add_option("-o", "--output", default=None, dest="output", help="Output file in interaction xml format.")
optparser.add_option("-r", "--rules", default=None, dest="rules", help="dictionary of python dictionaries with attribute:value pairs.")
optparser.add_option("-v", "--reverse", default=False, dest="reverse", action="store_true", help="")
(options, args) = optparser.parse_args()
if options.input == None:
print >> sys.stderr, "Error, input file not defined."
optparser.print_help()
sys.exit(1)
if options.output == None:
print >> sys.stderr, "Error, output file not defined."
optparser.print_help()
sys.exit(1)
# Rules e.g. "{\"pair\":{},\"interaction\":{},\"entity\":{\"given\":\"False\"}}"
rules = eval(options.rules)
print >> sys.stderr, "Rules:", rules
processCorpus(options.input, options.output, rules, options.reverse)
| mit | 8,528,157,482,372,606,000 | 42.209524 | 143 | 0.622658 | false | 4.124545 | false | false | false |
Pirata-Repository/Pirata | plugin.video.movie25/resources/libs/plugins/tvrelease.py | 1 | 14008 | import urllib, urllib2,re,string,sys,os
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
from resources.libs import main
from t0mm0.common.addon import Addon
addon_id = 'plugin.video.movie25'
selfAddon = xbmcaddon.Addon(id=addon_id)
addon = Addon(addon_id, sys.argv)
art = main.art
error_logo = art+'/bigx.png'
BASEURL = 'http://www.tv-release.net/'
prettyName = 'TVRelease'
def MAINMENU():
    """Build the add-on's root menu: search, TV/movie quality categories,
    and the resolver-settings entry, then apply the saved view mode."""
    # (label, url, mode, icon) for every plain directory entry, in display order.
    menu_entries = [
        ('Search Tv-Release', BASEURL+'?seacher=', 1006, art+'/tvrsearch1.png'),
        ('TV 480', BASEURL+'?cat=TV-480p', 1001, art+'/TV480.png'),
        ('TV 720', BASEURL+'?cat=TV-720p', 1001, art+'/TV720.png'),
        ('TV MP4', BASEURL+'?cat=TV-Mp4', 1001, art+'/TVmp4.png'),
        ('TV Xvid', BASEURL+'?cat=TV-XviD', 1001, art+'/TVxvid.png'),
        # TV Packs entry intentionally disabled:
        # ('TV Packs', BASEURL+'category/tvshows/tvpack/', 1007, art+'/TVpacks.png'),
        ('TV Foreign', BASEURL+'?cat=TV-Foreign', 1001, art+'/TVforeign.png'),
        ('Movies 480', BASEURL+'?cat=Movies-480p', 1001, art+'/Movies480.png'),
        ('Movies 720', BASEURL+'?cat=Movies-720p', 1001, art+'/Movies720.png'),
        ('Movies Xvid', BASEURL+'?cat=Movies-XviD', 1001, art+'/Moviesxvid.png'),
        ('Movies Foreign', BASEURL+'?cat=Movies-Foreign', 1001, art+'/Moviesforeign.png'),
    ]
    for label, link, mode, icon in menu_entries:
        main.addDir(label, link, mode, icon)
    main.addSpecial('Resolver Settings', BASEURL, 1004, art+'/tvrresolver.png')
    main.VIEWSB()
def INDEX(url):
    """Scrape one TV-Release listing page and add a directory entry per release.

    url may carry a '!tv' or '!movie' suffix (set by superSearch results) that
    restricts which scraped rows are listed; otherwise rows are routed by the
    site's category tag. Also adds 'Next Page' / 'Go to Page' entries when the
    page contains pagination markup. Returns False if the user cancels the
    progress dialog, otherwise None.
    """
    types = []
    SearchType = None
    # Split off an optional '!tv' / '!movie' filter appended to the url.
    if '!' in url:
        r = url.rpartition('!')
        print r
        url = r[0]
        SearchType = r[2]
    else:
        url = url
    # NOTE(review): `types` is computed here but never read afterwards.
    if 'cat=TV' in url:
        types = 'tv'
    elif 'cat=Movies' in url:
        types = 'movie'
    html = GETHTML(url)
    if html == None:
        return
    # Each table row yields (category tag, relative link, release name).
    pattern = '<tr><td[^>]*?><a [^>]*?>([^<]*?)</a></td><td[^>]*?><a href=\'([^\']*?)\'[^>]*?>([^<]*?)<'
    r = re.findall(pattern, html, re.I|re.M|re.DOTALL)
    # Progress dialog while every row is turned into a directory item.
    dialogWait = xbmcgui.DialogProgress()
    ret = dialogWait.create('Please wait until list is cached.')
    totalLinks = len(r)
    loadedLinks = 0
    remaining_display = 'Media loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
    dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
    for tag, url, name in r:
        # Site links are relative; make them absolute.
        url = BASEURL + url
        if re.search('(?i)WEB-DL',name): tag = tag.strip() + " WEB-DL"
        # If the release name carries its own resolution, reflect it in the tag.
        if re.findall('\d{4}p', name):
            r = re.findall('(.+?)\s(\d+p)', name)
            for name, quality in r:
                tag = tag.replace('720p',quality)
                pass
        # Normalise the display name: 'S01E02' episodes, dated episodes,
        # or strip everything from ' hdtv x' onward.
        if re.findall('\ss\d+e\d+\s', name, re.I|re.DOTALL):
            r = re.findall('(.+?)\ss(\d+)e(\d+)\s', name, re.I)
            for name, season, episode in r:
                name = name+' S'+season+'E'+episode
        elif re.findall('\s\d{4}\s\d{2}\s\d{2}\s', name):
            r = re.findall('(.+?)\s(\d{4})\s(\d{2})\s(\d{2})\s',name)
            for name, year, month, day in r:
                name = name+' '+year+' '+month+' '+day
        elif re.findall('\shdtv\sx', name, re.I):
            r = re.findall('(.+?)\shdtv\sx',name, re.I)
            for name in r:
                pass
        # Collapse runs of whitespace and append the quality tag in red.
        name = re.sub('\s\s+',' ',name).strip()
        name = name+' [COLOR red]'+re.sub('(?sim)^(TV-|Movies-)(.*)','\\2',tag)+'[/COLOR]'
        # Route to TV-episode or movie directory items; when a SearchType
        # filter is present, only matching categories are listed.
        if SearchType == None:
            if 'TV' in tag:
                main.addDirTE(name,url,1003,'','','','','','')
            elif 'Movies' in tag:
                if re.findall('\s\d+\s',name):
                    # NOTE(review): str.rpartition takes a literal string, not a
                    # regex — '\s\d{4}\s' never occurs in a name, and the result
                    # is unused anyway; this looks like dead code.
                    r = name.rpartition('\s\d{4}\s')
                main.addDirM(name,url,1003,'','','','','','')
        elif SearchType == 'tv' and 'TV' in tag:
            main.addDirTE(name,url,1003,'','','','','','')
        elif SearchType == 'movie' and 'Movies' in tag:
            # NOTE(review): same dead rpartition as above — result unused.
            r = name.rpartition('\s\d{4}\s')
            main.addDirM(name,url,1003,'','','','','','')
        loadedLinks = loadedLinks + 1
        percent = (loadedLinks * 100)/totalLinks
        remaining_display = 'Media loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
        dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
        # Abort (returning False) if the user cancels the progress dialog.
        if dialogWait.iscanceled(): return False
    dialogWait.close()
    del dialogWait
    # Pagination: add 'Go to Page' and 'Next Page' entries when present.
    if "<div class='zmg_pn'" in html:
        r = re.findall("""<span class='zmg_pn_current'>(\d+?)</span>[^<]*?<span class='zmg_pn_standar'><a href="([^"]+?)">""", html, re.I|re.DOTALL|re.M)
        total = re.findall('">(\d+)</a></span>', html)
        # The last page-number anchor is the total page count.
        if total: total = total[-1]
        else: total = "1"
        for current, url in r:
            url = BASEURL + url
            name = 'Page '+current+' of '+total+' [COLOR green]Next Page >>>[/COLOR]'
            main.addDir('[COLOR green]Go to Page[/COLOR]', url+':'+total, 1002, art+'/gotopagetr.png')
            main.addDir(name, url.replace('%5C',''), 1001, art+'/nextpage.png')
        main.VIEWS()
def LISTHOSTERS(name,url):
    """Scrape a single release page and add one playable entry per
    supported file hoster found on it."""
    page = GETHTML(url)
    if page == None:
        return
    # Optional instruction line, suppressed via an add-on setting.
    if selfAddon.getSetting("hide-download-instructions") != "true":
        main.addLink("[COLOR red]For Download Options, Bring up Context Menu Over Selected Link.[/COLOR]",'','')
    # Primary link table, plus any nofollow links that are not .rar archives.
    links = re.findall(r'class="td_cols"><a target=\'_blank\'.+?href=\'(.+?)\'>', page, re.M|re.DOTALL)
    try:
        extra = re.findall(r'rel="nofollow">((?!.*\.rar).*)</a>', page, re.I)
        links = links + extra
    except: pass
    if not links:
        # Nothing streamable found — tell the user and close the listing.
        addon.show_ok_dialog(['No Playable Streams Found,','It Might Be That They Are Still Being Uploaded,',
                                'Or They Are Unstreamable Archive Files'],'MashUP: TV-Release')
        xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
        return
    from urlparse import urlparse
    label = name.strip()
    for link in links:
        # The site sometimes links the dead .nl mirror of 180upload.
        link = link.replace("180upload.nl","180upload.com")
        # First hostname component (minus 'www.') identifies the hoster.
        host = urlparse(link).hostname.replace('www.','').partition('.')[0]
        if main.supportedHost(host):
            icon = art+'/hosts/'+host+'.png'
            main.addDown2(label+" [COLOR blue]"+host.upper()+"[/COLOR]", link, 1005, icon, icon)
def superSearch(encode,type):
    """Global-search hook: query TV-Release for `encode` and return results.

    encode: URL-encoded search string.
    type:   'Movies' searches the movie categories; anything else searches TV.
    Returns a list of (label, source, url, image, mode, isFolder) tuples for
    the add-on's cross-site search, or [] if the fetch or parse fails.
    """
    try:
        # Category filter passed to the site's search endpoint.
        if type == 'Movies': cat = 'Movies-XviD,Movies-720p,Movies-480p,Movies-Foreign,Movies-DVDR,'
        else: cat = 'TV-XviD,TV-Mp4,TV-720p,TV-480p,TV-Foreign,'
        surl ='http://tv-release.net/?s='+encode+'&cat='+cat
        returnList=[]
        link=main.OPENURL(surl,verbose=False)
        # Strip layout whitespace so the row regex matches across lines.
        link=link.replace('\r','').replace('\n','').replace('\t','').replace(' ','')
        # Each table row yields (category tag, relative link, release name).
        pattern = '<tr><td[^>]*?><a [^>]*?>([^<]*?)</a></td><td[^>]*?><a href=\'([^\']*?)\'[^>]*?>([^<]*?)<'
        r = re.findall(pattern, link, re.I|re.M|re.DOTALL)
        for tag, url, name in r:
            url = BASEURL + url
            if re.search('(?i)WEB-DL',name): tag = tag.strip() + " WEB-DL"
            # If the release name carries its own resolution, reflect it in the tag.
            if re.findall('\d+p\s', name):
                r = re.findall('(.+?)\s(\d+p)\s', name)
                for name, quality in r:
                    tag = tag.replace('720p',quality)
                    pass
            # Normalise the display name: 'S01E02' episodes, dated episodes,
            # or strip everything from ' hdtv x' onward (same logic as INDEX).
            if re.findall('\ss\d+e\d+\s', name, re.I|re.DOTALL):
                r = re.findall('(.+?)\ss(\d+)e(\d+)\s', name, re.I)
                for name, season, episode in r:
                    name = name+' S'+season+'E'+episode
            elif re.findall('\s\d{4}\s\d{2}\s\d{2}\s', name):
                r = re.findall('(.+?)\s(\d{4})\s(\d{2})\s(\d{2})\s',name)
                for name, year, month, day in r:
                    name = name+' '+year+' '+month+' '+day
            elif re.findall('\shdtv\sx', name, re.I):
                r = re.findall('(.+?)\shdtv\sx',name, re.I)
                for name in r:
                    pass
            name = name+' [COLOR red]'+re.sub('(?sim)^(TV-|Movies-)(.*)','\\2',tag)+'[/COLOR]'
            returnList.append((name,prettyName,url,'',1003,True))
        return returnList
    # Was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; only real errors should fall back to an empty list.
    except Exception: return []
def SEARCHhistory():
dialog = xbmcgui.Dialog()
ret = dialog.select('[B]Choose A Search Type[/B]',['[B]TV Shows[/B]','[B]Movies[/B]'])
if ret == -1:
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
if ret == 0:
searchType = 'tv'
seapath=os.path.join(main.datapath,'Search')
SeaFile=os.path.join(seapath,'SearchHistoryTv')
if not os.path.exists(SeaFile):
SEARCH(searchType)
else:
main.addDir('Search',searchType,1008,art+'/search.png')
main.addDir('Clear History',SeaFile,128,art+'/cleahis.png')
thumb=art+'/link.png'
searchis=re.compile('search="(.+?)",').findall(open(SeaFile,'r').read())
for seahis in reversed(searchis):
url=seahis
seahis=seahis.replace('%20',' ')
url = 'http://tv-release.net/?s='+url+'&cat=TV-XviD,TV-Mp4,TV-720p,TV-480p,TV-Foreign,'
main.addDir(seahis,url,1001,thumb)
if ret == 1:
searchType = 'movie'
seapath=os.path.join(main.datapath,'Search')
SeaFile=os.path.join(seapath,'SearchHistory25')
if not os.path.exists(SeaFile):
SEARCH(searchType)
else:
main.addDir('Search',searchType,1008,art+'/search.png')
main.addDir('Clear History',SeaFile,128,art+'/cleahis.png')
thumb=art+'/link.png'
searchis=re.compile('search="(.+?)",').findall(open(SeaFile,'r').read())
for seahis in reversed(searchis):
url=seahis
seahis=seahis.replace('%20',' ')
url = 'http://tv-release.net/?s='+url+'&cat=Movies-XviD,Movies-720p,Movies-480p,Movies-Foreign,Movies-DVDR,'
main.addDir(seahis,url,1001,thumb)
def SEARCH(murl):
if murl == 'tv':
encode = main.updateSearchFile(murl,'TV',defaultValue=murl,searchMsg='Search For Shows or Episodes')
if not encode: return False
url = 'http://tv-release.net/?s='+encode+'&cat=TV-XviD,TV-Mp4,TV-720p,TV-480p,TV-Foreign,'
INDEX(url)
elif murl=='movie':
encode = main.updateSearchFile(murl,'Movies',defaultValue=murl,searchMsg='Search For Movies')
if not encode: return False
url = 'http://tv-release.net/?s='+encode+'&cat=Movies-XviD,Movies-720p,Movies-480p,Movies-Foreign,Movies-DVDR,'
INDEX(url)
def TVPACKS(url):
html = GETHTML(url)
if html == None:
return
pattern = '(?sim)Tv/Pack</a></span>.+?<a href="([^"]+?)"><b><font size="2px">([^<]+?)<'
r = re.findall(pattern,html)
for url, name in r:
main.addDir(name, url, 1001,'')
def GOTOP(url):
default = url
r = url.rpartition(':')
url = re.findall('^(.+page=)\d+(.*)$',r[0])
total = r[2]
keyboard = xbmcgui.Dialog().numeric(0, '[B][I]Goto Page Number[/B][/I]')
if not keyboard:
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return False
if int(keyboard) > int(total) or keyboard == '0':
addon.show_ok_dialog(['Please Do Not Enter a Page Number bigger than',''+total+', Enter A Number Between 1 and '+total+'',
''], 'MashUP: TV-Release')
GOTOP(default)
url = url[0][0]+keyboard+str(url[0][1])
INDEX(url)
def PLAYMEDIA(name,url):
ok = True
r = re.findall(r'(.+?)\[COLOR', name)
name = r[0]
r=re.findall('Season(.+?)Episode([^<]+)',name)
if r:
infoLabels =main.GETMETAEpiT(name,'','')
video_type='episode'
season=infoLabels['season']
episode=infoLabels['episode']
else:
infoLabels =main.GETMETAT(name,'','','')
video_type='movie'
season=''
episode=''
img=infoLabels['cover_url']
fanart =infoLabels['backdrop_url']
imdb_id=infoLabels['imdb_id']
infolabels = { 'supports_meta' : 'true', 'video_type':video_type, 'name':str(infoLabels['title']), 'imdb_id':str(infoLabels['imdb_id']), 'season':str(season), 'episode':str(episode), 'year':str(infoLabels['year']) }
try:
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
stream_url = main.resolve_url(url)
infoL={'Title': infoLabels['title'], 'Plot': infoLabels['plot'], 'Genre': infoLabels['genre']}
# play with bookmark
from resources.universal import playbackengine
player = playbackengine.PlayWithoutQueueSupport(resolved_url=stream_url, addon_id=addon_id, video_type=video_type, title=str(infoLabels['title']),season=str(season), episode=str(episode), year=str(infoLabels['year']),img=img,infolabels=infoL, watchedCallbackwithParams=main.WatchedCallbackwithParams,imdb_id=imdb_id)
#WatchHistory
if selfAddon.getSetting("whistory") == "true":
from resources.universal import watchhistory
wh = watchhistory.WatchHistory(addon_id)
wh.add_item(name+' '+'[COLOR=FF67cc33]TvRelease[/COLOR]', sys.argv[0]+sys.argv[2], infolabels=infolabels, img=str(img), fanart=str(fanart), is_folder=False)
player.KeepAlive()
return ok
except:
return ok
def GETHTML(url):
try:
h = main.OPENURL(url)
if '<h2>Under Maintenance</h2>' in h:
addon.show_ok_dialog(['[COLOR=FF67cc33][B]TV-Release is Down For Maintenance,[/COLOR][/B]',
'[COLOR=FF67cc33][B]Please Try Again Later[/COLOR][/B]',''],'MashUP: TV-Release')
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return
return h
except urllib2.URLError, e:
addon.show_small_popup('MashUP: Tv-Release','TV-Release Web Site Failed To Respond, Check Log For Details', 9000, error_logo)
addon.log_notice(str(e))
xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
return
| gpl-2.0 | -329,808,216,355,989,440 | 47.137457 | 324 | 0.549543 | false | 3.224678 | false | false | false |
joedougherty/sentential | sentential/KnowledgeBase.py | 1 | 2970 | # -*- coding: utf-8 -*-
from collections import OrderedDict
from .Expression import expressify
from .Proof import Proof
from .ProofGraph import ProofGraph
from .Proposition import Proposition
from .rewrite_rules import cnf, group_cnf, negate
class KnowledgeBase:
def __init__(self):
self._axioms = OrderedDict()
self._goal = None
self.run_proofs = list()
def add_axiom(self, axiom_as_prop, axiom_is_goal=False):
if axiom_is_goal:
cnf_exp = cnf(negate(expressify(axiom_as_prop)))
else:
cnf_exp = cnf(expressify(axiom_as_prop))
self._axioms[len(self._axioms)] = {'proposition': axiom_as_prop,
'cnf': cnf_exp,
'clauses': group_cnf(cnf_exp),
'is_goal': axiom_is_goal}
def add_goal(self, goal_as_prop):
if self._goal:
raise Exception("{} is already defined as the goal! Use .remove_goal() before adding a new goal.".format(self._goal))
if not isinstance(goal_as_prop, Proposition):
raise Exception("Goal must be of the type Proposition!")
self.add_axiom(goal_as_prop, axiom_is_goal=True)
self._goal = goal_as_prop
self._goal_as_unit_clause = group_cnf(cnf(expressify(goal_as_prop)))
self._negated_goal_as_unit_clause = group_cnf(cnf(negate(expressify(goal_as_prop))))
def remove_goal(self):
self._goal, self._goal_as_unit_clause, self._negated_goal_as_unit_clause = None, None, None
for k, v in self._axioms.items():
if v.get('is_goal') == True:
del self._axioms[k]
def _gather_clauses(self):
clause_collection = []
for idx, axiom in self._axioms.items():
clause_collection = clause_collection + axiom.get('clauses')
return clause_collection
def most_recent_proof(self):
if self.run_proofs:
return self.run_proofs[-1]
else:
raise Exception("Run a proof first!")
def prove(self, goal=None, return_proof=False):
if goal is None:
if not hasattr(self, '_goal_as_unit_clause'):
raise Exception("You must set a goal before running a proof!")
goal = self._goal_as_unit_clause
else:
self.remove_goal()
self.add_goal(goal)
negated_goal = self._negated_goal_as_unit_clause
proof_attempt = Proof(goal, negated_goal, self._gather_clauses())
self.run_proofs.append(proof_attempt)
if return_proof:
return proof_attempt
try:
return proof_attempt.find(trace=True)
except Exception as e:
print(e)
print('Clause Collection: {}'.format(proof_attempt.clause_collection))
print('Set of Support: {}'.format(proof_attempt.set_of_support))
return proof_attempt
| mit | -3,057,124,202,246,682,000 | 35.219512 | 129 | 0.582155 | false | 3.613139 | false | false | false |
adinlead/MongoDB2MySQL | src/DBUtils.py | 1 | 6814 | # coding=utf8
from pandas import json
import MySQLdb
from pymongo import MongoClient
class MongoHolder():
mongodb = None
collection = None
def initMongoDB(self, uri, port, dbname):
client = MongoClient(uri, port, maxPoolSize=200, connectTimeoutMS=60 * 1000, socketTimeoutMS=60 * 1000)
self.mongodb = client[dbname]
def readMongoTable(self, page, limit):
return self.mongodb[self.collection].find().skip(page * limit).limit(limit)
def countMongoDB(self):
return self.mongodb[self.collection].count()
class MySQLHolder():
mysql = None
collection = None
mysql_db = None # mySQL数据库名称
text_column = []
unique_column = []
def initMySql(self, host, port, user, passwd, dbname):
self.mysql = MySQLdb.connect(
host=host,
port=port,
user=user,
passwd=passwd,
db=dbname,
charset="utf8"
)
def createMySqlTable(self, tableName):
base_sql = 'CREATE TABLE `%s` (`_idx_` INT NOT NULL AUTO_INCREMENT,PRIMARY KEY (`_idx_`))DEFAULT CHARSET=utf8'
cursor = self.mysql.cursor()
cursor.execute(base_sql % tableName)
data = cursor.fetchone()
return data
def createMySqlFieldToTable(self, tableName, fieldName, fieldType, default='', unique=False):
try:
if unique:
sql = 'ALTER TABLE `%s` ADD COLUMN `%s` %s %s,' \
'ADD UNIQUE INDEX `%s_UNIQUE` (`%s` ASC)' % (
tableName, fieldName, fieldType, default, fieldName, fieldName)
else:
sql = 'ALTER TABLE `%s` ADD COLUMN `%s` %s %s;' % (tableName, fieldName, fieldType, default)
cursor = self.mysql.cursor()
cursor.execute(sql)
data = cursor.fetchone()
return data
except Exception, e:
pass
def executeSQL(self, sql):
cursor = self.mysql.cursor()
cursor.execute(sql)
data = cursor.fetchone()
return data
def executeSQL(self, sql, param):
param = tuple(param)
cursor = self.mysql.cursor()
cursor.execute(sql, param)
data = cursor.fetchone()
return data
def executeInsterSQL(self, tableName, key_arr, pla_arr, val_arr):
val_arr = tuple(val_arr)
sql = 'INSERT INTO %s (%s) VALUES(%s)' % (tableName, key_arr, pla_arr)
try:
cursor = self.mysql.cursor()
cursor.execute(sql, val_arr)
pass
except:
if not self.hasMySqlTableForDB(tableName):
self.createMySqlTable(tableName)
tabKetArr = self.getMySqlFieldNameByTable(tableName)
key_list = key_arr.split(',')
for i in range(0, len(key_list)):
key = key_list[i]
naked = key.replace('`', '')
if naked == 'key' or naked == 'id' or naked == '_id':
unique = True
else:
unique = False
if (naked,) not in tabKetArr:
if isinstance(val_arr[i], int):
self.createMySqlFieldToTable(tableName, naked, 'INT(11)', unique=unique)
elif isinstance(val_arr[i], float) or isinstance(val_arr[i], long):
self.createMySqlFieldToTable(tableName, naked, 'DOUBLE', unique=unique)
elif naked in self.text_column: # 检查特殊字段(TEXT)
self.createMySqlFieldToTable(tableName, naked, 'TEXT', unique=unique)
else:
self.createMySqlFieldToTable(tableName, naked, 'VARCHAR(256)', unique=unique)
cursor = self.mysql.cursor()
try:
cursor.execute(sql, val_arr)
except Exception, e:
if e[0] == 1062:
return
cursor.execute(sql, val_arr)
self.mysql.commit()
def executeInsterSQLOfMultiterm(self, tableName, key_arr, pla_arr, val_arr_list):
val_arr = val_arr_list[0]
for i in range(0, len(val_arr_list)):
val_arr_list[i] = tuple(val_arr_list[i])
val_arrs = tuple(val_arr_list)
sql = 'INSERT INTO %s (%s) VALUES(%s)' % (tableName, key_arr, pla_arr)
try:
cursor = self.mysql.cursor()
cursor.executemany(sql, val_arrs)
except:
if not self.hasMySqlTableForDB(tableName):
self.createMySqlTable(tableName)
tabKetArr = self.getMySqlFieldNameByTable(tableName)
key_list = key_arr.split(',')
for i in range(0, len(key_list)):
key = key_list[i]
naked = key.replace('`', '')
if naked in unique_column:
unique = True
else:
unique = False
if naked not in tabKetArr:
if isinstance(val_arr[i], int):
self.createMySqlFieldToTable(tableName, naked, 'INT(11)', unique=unique)
elif isinstance(val_arr[i], float) or isinstance(val_arr[i], long):
self.createMySqlFieldToTable(tableName, naked, 'DOUBLE', unique=unique)
elif 'dra' in naked or 'summary' in naked:
self.createMySqlFieldToTable(tableName, naked, 'TEXT', unique=unique)
else:
self.createMySqlFieldToTable(tableName, naked, 'VARCHAR(256)', unique=unique)
cursor = self.mysql.cursor()
cursor.executemany(sql, val_arrs)
self.mysql.commit()
def getMySqlFieldNameByTable(self, tableName):
base_sql = "select COLUMN_NAME from information_schema.COLUMNS where table_name = '%s' and table_schema = '%s'"
cursor = self.mysql.cursor()
cursor.execute(base_sql % (tableName, self.mysql_db))
data = cursor.fetchall()
return data
def getMySqlTableName(self):
base_sql = "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s'"
cursor = self.mysql.cursor()
cursor.execute(base_sql % (self.mysql_db))
data = cursor.fetchall()
return data
def hasMySqlTableForDB(self, tableName):
base_sql = "SELECT COUNT(TABLE_NAME) FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME='%s'"
cursor = self.mysql.cursor()
cursor.execute(base_sql % (self.mysql_db, tableName))
data = cursor.fetchone()
return data[0] > 0
| gpl-3.0 | -2,931,272,471,846,969,000 | 39.163636 | 122 | 0.541814 | false | 3.974254 | false | false | false |
eesatfan/vuplus-enigma2 | lib/python/Plugins/SystemPlugins/DeviceManager/plugin.py | 1 | 69207 | from Components.Label import Label
from Components.ActionMap import ActionMap
from Components.config import config, ConfigSelection, getConfigListEntry, ConfigSubsection, ConfigEnableDisable, ConfigYesNo, ConfigInteger
from Components.ConfigList import ConfigListScreen
from Components.Console import Console
from Components.GUIComponent import GUIComponent
from Components.Harddisk import harddiskmanager
from Components.MenuList import MenuList
from Components.Pixmap import Pixmap, MultiPixmap
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from Plugins.Plugin import PluginDescriptor
from Screens.MessageBox import MessageBox
from Screens.Screen import Screen
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Tools.BoundFunction import boundFunction
from Tools.LoadPixmap import LoadPixmap
from Tools.Notifications import AddNotificationWithCallback
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_PLUGINS, SCOPE_CURRENT_PLUGIN, SCOPE_CURRENT_SKIN, SCOPE_METADIR
from skin import loadSkin
from os import system, makedirs, path, listdir, statvfs, popen
import os
import re
import time
from Components.Sources.StaticText import StaticText
from Components.FileList import FileList
from Screens.InputBox import InputBox
from Components.Input import Input
from Screens.ChoiceBox import ChoiceBox
from enigma import eTimer
from __init__ import _
# Persistent plugin settings (stored via enigma2's config system):
# - hotplug_enable:    auto-mount known devices when they are plugged in
# - mountcheck_enable: verify the HDD mount state on startup
config.plugins.devicemanager = ConfigSubsection()
config.plugins.devicemanager.hotplug_enable = ConfigEnableDisable(default=True)
config.plugins.devicemanager.mountcheck_enable = ConfigEnableDisable(default=True)
def readFile(filename):
	"""Return the whitespace-stripped contents of *filename*.

	Uses a context manager so the file handle is released even if
	read() raises (the original leaked the handle on error).
	"""
	with open(filename) as f:
		return f.read().strip()
def byteConversion(byte):
	"""Format a byte count as "<units>.<remainder> GB" or "... MB".

	Accepts an int or a numeric string; an empty string yields "".
	NOTE: the part after the dot is the remainder expressed in the next
	smaller unit (0-1023), matching the original display format — it is
	NOT a decimal fraction (e.g. 3 GB + 512 MB -> "3.512 GB").
	"""
	if type(byte) == str and len(byte) == 0:
		return ""
	# Coerce to an integer.  Replaces the Python2-only long() so the helper
	# also works on Python 3; results are identical for integral inputs.
	if not isinstance(byte, int):
		byte = int(byte)
	GB = 1024 * 1024 * 1024
	MB = 1024 * 1024
	# Explicit floor division: equivalent to py2's int "/" and correct
	# under py3's true division as well.
	if byte > GB:
		int_part = byte // GB
		dec_part = byte % GB // MB
		return "%d.%d GB" % (int_part, dec_part)
	else:
		int_part = byte // MB
		dec_part = byte % MB // 1024
		return "%d.%d MB" % (int_part, dec_part)
def checkStrValue(value, empty=""):
	"""Return *value* when it is a non-empty str; otherwise the *empty* fallback."""
	if type(value) == str and len(value) > 0:
		return value
	return empty
class DeviceManagerConfiguration(Screen, ConfigListScreen):
	"""Setup screen for the DeviceManager plugin options (reuses the stock "Setup" skin)."""

	def __init__(self, session):
		self.session = session
		Screen.__init__(self, session)
		self.skinName = "Setup"
		# The entry list must exist before ConfigListScreen consumes it.
		self.createConfigList()
		ConfigListScreen.__init__(self, self.list, session=self.session)
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("OK"))
		self["shortcuts"] = ActionMap(["ShortcutActions", "SetupActions"],
		{
			"ok": self.keySave,
			"green": self.keySave,
			"cancel": self.keyCancel,
			"red": self.keyCancel,
		}, -2)
		self.onShown.append(self.setWindowTitle)
		# Remembered so callers could compare against the value at entry time.
		self.old_hotplug_enable = config.plugins.devicemanager.hotplug_enable.value

	def setWindowTitle(self):
		"""Apply the screen title once the window is shown."""
		self.setTitle(_("DeviceManager configuration"))

	def createConfigList(self):
		"""Build self.list with the three user-visible options."""
		self.list = [
			getConfigListEntry(_("Enable mount check for HDD : "), config.plugins.devicemanager.mountcheck_enable),
			getConfigListEntry(_("Harddisk standby after : "), config.usage.hdd_standby),
			getConfigListEntry(_("Mount known devices automatically : "), config.plugins.devicemanager.hotplug_enable),
		]

	def keySave(self):
		"""Sync the hotplug hook with the chosen setting, persist all entries, close."""
		hooks = harddiskmanager.on_partition_list_change
		if config.plugins.devicemanager.hotplug_enable.value:
			if DeviceManagerhotplugDeviceStart not in hooks:
				hooks.append(DeviceManagerhotplugDeviceStart)
		elif DeviceManagerhotplugDeviceStart in hooks:
			hooks.remove(DeviceManagerhotplugDeviceStart)
		for entry in self["config"].list:
			entry[1].save()
		self.close()
class DeviceManager(Screen):
skin = """
<screen position="center,center" size="590,350" title="DeviceManager">
<ePixmap pixmap="skin_default/buttons/red.png" position="20,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="160,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="300,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="440,0" size="140,40" alphatest="on" />
<widget name="key_red" position="20,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
<widget name="key_green" position="160,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
<widget name="key_yellow" position="300,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
<widget name="key_blue" position="440,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#18188b" transparent="1" />
<ePixmap pixmap="skin_default/div-h.png" position="0,48" size="590,2" alphatest="on" />
<widget source="menu" render="Listbox" position="0,48" size="590,350" scrollbarMode="showOnDemand">
<convert type="TemplatedMultiContent">
{"templates":
{"default": (54,[
MultiContentEntryText(pos = (50, 0), size = (510, 30), font=0, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 0), # index 0 is vendor - model
MultiContentEntryText(pos = (50, 32), size = (120, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 1), # index 1 is Device
MultiContentEntryText(pos = (170, 32), size = (120, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 2), # index 2 is Size
MultiContentEntryText(pos = (290, 32), size = (120, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 3), # index 3 is Partitions
MultiContentEntryText(pos = (410, 32), size = (130, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 4), # index 4 is Removable
MultiContentEntryPixmapAlphaTest(pos = (0, 52), size = (590, 2), png = 5), # png 5 is the div pixmap
]),
"partitions": (98, [
MultiContentEntryText(pos = (50, 0), size = (500, 30), font=0, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 0), # index 1 is Partition
MultiContentEntryText(pos = (50, 32), size = (500, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 1), # index 2 is Mounted on
MultiContentEntryText(pos = (50, 54), size = (500, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 2), # index 3 UUID
MultiContentEntryText(pos = (50, 76), size = (130, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 3), # index 4 Type
MultiContentEntryText(pos = (180, 76), size = (130, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 4), # index 5 Size_total
MultiContentEntryText(pos = (310, 76), size = (190, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 5), # index 6 Size_free
MultiContentEntryPixmapAlphaTest(pos = (0, 96), size = (590, 2), png = 6), # png 6 is the div pixmap
]),
"mountpoint": (54,[
MultiContentEntryPixmapAlphaTest(pos = (10, 7), size = (30, 30), png = 0), # index 0: picture
MultiContentEntryText(pos = (40, 0), size = (500, 30), font=0, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 1), # index 1 name
MultiContentEntryText(pos = (40, 32), size = (500, 20), font=1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER, text = 2), # index 2 path
MultiContentEntryPixmapAlphaTest(pos = (0, 52), size = (590, 2), png = 5), # index 5 is the div pixmap
])
},
"fonts": [gFont("Regular", 22),gFont("Regular", 16),gFont("Regular", 28)],
"itemHeight": 54
}
</convert>
</widget>
</screen>
"""
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.currList = "default"
self.currDevice = None
self.currPartition = None
self.defaultMountPoint = "/media/hdd"
self.deviceList = []
self["menu"] = List(self.deviceList)
self["key_red"] = Label(_("Close"))
self["key_green"] = Label(" ")
self["key_yellow"] = Label(" ")
self["key_blue"] = Label(" ")
self["shortcuts"] = ActionMap(["ShortcutActions", "SetupActions", "MenuActions" ],
{
"ok": self.keyOk,
"cancel": self.keyCancel,
"red": self.keyCancel,
"green": self.keyOk,
"yellow": self.keyYellow,
"blue": self.keyBlue,
"menu": self.keyMenu,
}, -2)
self.DeviceManagerConsole = Console()
self.loadIcon()
if not self.selectionChanged in self["menu"].onSelectionChanged:
self["menu"].onSelectionChanged.append(self.selectionChanged)
self.onLayoutFinish.append(self.showDeviceList)
self.onLayoutFinish.append(self.addPartitionListChange)
self.onClose.append(self.removePartitionListChange)
self.onChangedEntry = []
self.blockDevices = {}
def addPartitionListChange(self):
harddiskmanager.on_partition_list_change.append(self.partitionListChanged)
def removePartitionListChange(self):
harddiskmanager.on_partition_list_change.remove(self.partitionListChanged)
def partitionListChanged(self, action, device):
print "[Device manager] hotplug partitionListChanged"
if self.currList != "default" and device.device[:3] != self.currDevice["blockdev"]:
return
self.showDeviceList()
def loadIcon(self):
self.icon_button_green = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/buttons/button_green.png"))
self.divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/div-h.png"))
def selectionChanged(self):
if self.currList == "partitions":
currentPartition = self.getCurrentPartition()
if currentPartition is not None:
if currentPartition["mountpoint"] != "":
self["key_green"].setText(_("Umount"))
else:
self["key_green"].setText(_("Mount"))
if currentPartition["fstype"] == "":
self["key_blue"].setText("")
elif currentPartition["fstype"][:3] == "ext":
self["key_blue"].setText(_("Check"))
else:
self["key_blue"].setText("")
def showDeviceList(self):
self.deviceList = []
self["key_red"].setText(_("Close"))
self["key_green"].setText(_("Ok"))
self["key_yellow"].setText(" ")
self["key_blue"].setText(_("Initialize"))
deviceinfo.refresh()
for device in deviceinfo.getBlockDevices():
deviceEntry = (
"%s - %s"%(device["vendor"], device["model"]), # vendor : str, model : str, index 0
_("device : %s")%(device["blockdev"]), # str
_("Size : %s")%(byteConversion(device["size"])), # str, bytes
_("Partitions : %s")%(len(device["partitions"])), # list
_("Removable : %s")%(device["removable"] and 'Yes' or 'No'), # bool [True, False]
self.divpng, # png 5
device, # index 6
)
# print "[DeviceManager] deviceEntry : ", deviceEntry
self.deviceList.append(deviceEntry)
self.currList = "default"
self["menu"].style = "default"
self["menu"].setList(self.deviceList)
def showPartitionList(self):
if self.currDevice is None:
return
partitionList = []
for partition in self.currDevice["partitions"]:
partitionInfo = deviceinfo.getPartitionInfo(partition)
partitionEntry = (
_("Partition : /dev/%s")%partition, # index 0
_("Mounted on : %s")%checkStrValue(partitionInfo["mountpoint"], _("not mounted")),
_("UUID : %s")%checkStrValue(partitionInfo["uuid"], _("unknown")),
_("Type : %s")%checkStrValue(partitionInfo["fstype"], _("unknown")),
_("Size : %s")%checkStrValue(byteConversion(partitionInfo["size"]), _("unknown")),
_("Free : %s")%checkStrValue(byteConversion(partitionInfo["free"]), _("unknown")),
self.divpng, # index 6
partitionInfo, # index 7
)
# print "[DeviceManager] partitionEntry : ",partitionEntry
partitionList.append(partitionEntry)
if len(partitionList) != 0:
self["key_red"].setText(_("Devices"))
self["key_green"].setText(_("Mount"))
self["key_yellow"].setText(_("Format"))
self["key_blue"].setText(_("Check"))
self.currList = "partitions"
self["menu"].style = "partitions"
self["menu"].setList(partitionList)
self.selectionChanged()
else:
self.session.open(MessageBox, _("No partition list found on device.\nPlease click BLUE key and do Initialize to use this device."), MessageBox.TYPE_ERROR, timeout = 10)
def showMountPointSetup(self):
if self.currDevice is None or self.currPartition is None:
return
partition = self.currPartition["partition"]
if not os.access("/autofs/%s"%partition,0):
self.session.open(MessageBox, _("This partition is not mountable.\nYou need to check or format this partition."), MessageBox.TYPE_ERROR, timeout = 10)
return
self["key_red"].setText(_("Partitions"))
self["key_green"].setText(_("Ok"))
self["key_yellow"].setText("")
self["key_blue"].setText("")
self.mountPointList = []
currentMountPoint = self.currPartition["mountpoint"]
if currentMountPoint == "":
currentMountPoint = "'not mounted'"
defaultMountPoint = self.getDefaultMountPoint()
autoMountPoint = self.getAutoMountPoint()
defaultMountPointEntry = (self.icon_button_green, _("Set up Default Mount Point"), _("Mount Point : %s ->%s")%(currentMountPoint, defaultMountPoint), "default", defaultMountPoint, self.divpng)
autoMountPointEntry = (self.icon_button_green, _("Automatically set up a Mount Point"), _("Mount Point : %s -> %s")%(currentMountPoint, autoMountPoint), "auto", autoMountPoint, self.divpng)
manuallyMountPointEntry = (self.icon_button_green, _("User manually Set up a Mount Point"), _("Mount Point : click ok button on here."), "manual", "", self.divpng)
if not path.ismount(defaultMountPoint):
self.mountPointList.append(defaultMountPointEntry)
self.mountPointList.append(autoMountPointEntry)
self.mountPointList.append(manuallyMountPointEntry)
self.currList = "mountpoint"
self["menu"].style = "mountpoint"
self["menu"].setList(self.mountPointList)
def getCurrentDevice(self):
try:
return self["menu"].getCurrent()[6]
except:
return None
def getCurrentPartition(self):
try:
return self["menu"].getCurrent()[7]
except:
return None
def keyOk(self):
# print "keyOk"
if self.currList == "default":
self.currDevice = self.getCurrentDevice()
if self.currDevice is not None:
if len(self.currDevice["partitions"]) == 0:
self.session.open(MessageBox, _("No partition list found on device.\nPlease click BLUE key and do Initialize to use this device."), MessageBox.TYPE_ERROR, timeout = 10)
else:
self.showPartitionList()
else:
self.session.open(MessageBox, _("Device not found."), MessageBox.TYPE_ERROR, timeout = 10)
elif self.currList == "partitions":
currentPartition = self.getCurrentPartition()
if currentPartition is not None:
currMountPoint = currentPartition["mountpoint"]
currUuid = currentPartition["uuid"]
if currMountPoint == "":
self.currPartition = currentPartition
self.showMountPointSetup()
else:
self.doUmount(currMountPoint, self.showPartitionList)
else:
self.session.open(MessageBox, _("Partition info is not found."), MessageBox.TYPE_ERROR, timeout = 10)
elif self.currList == "mountpoint":
# self["menu"].getCurrent() : (green_button, "menu description", "mount point description, "default", mountpoint, self.divpng)
currEntry = self["menu"].getCurrent()[3]
if currEntry == "default":
# print "Setup mountpoint default!"
self.doMount(self.currPartition, self["menu"].getCurrent()[4])
elif currEntry == "auto":
# print "Setup mountpoint automatically!"
self.doMount(self.currPartition, self["menu"].getCurrent()[4])
else:
# print "Setup mountpoint manually!"
self.session.openWithCallback(self.MountpointBrowserCB, MountpointBrowser)
else:
pass
def keyCancel(self):
# print "keyCancel"
if self.DeviceManagerConsole is not None:
if len(self.DeviceManagerConsole.appContainers):
for name in self.DeviceManagerConsole.appContainers.keys():
self.DeviceManagerConsole.kill(name)
if self.currList == "partitions":
self.currDevice = None
self.showDeviceList()
elif self.currList == "mountpoint":
self.currPartition = None
self.showPartitionList()
else: # currList = "default"
self.close()
def keyYellow(self):
if self.currList == "partitions":
self.choiceBoxFstype()
def keyBlue(self):
if self.currList == "default":
device = self.getCurrentDevice()
if device is not None:
self.session.openWithCallback(self.deviceInitCB, DeviceInit, device["blockdev"], device["size"])
else:
self.session.open(MessageBox, _("Device not found."), MessageBox.TYPE_ERROR, timeout = 10)
elif self.currList == "partitions":
partition = self.getCurrentPartition()
if partition is not None:
self.session.openWithCallback(self.deviceCheckCB, DeviceCheck, partition)
else:
self.session.open(MessageBox, _("Partition info is not found."), MessageBox.TYPE_ERROR, timeout = 10)
def keyMenu(self):
self.session.open(DeviceManagerConfiguration)
def deviceInitCB(self, ret = True):
self.showDeviceList()
def deviceCheckCB(self, ret = True):
self.showPartitionList()
def deviceFormatCB(self, ret = True):
self.showPartitionList()
def choiceBoxFstype(self):
menu = []
menu.append((_("ext2 - recommended for USB flash memory"), "ext2"))
menu.append((_("ext3 - recommended for harddisks"), "ext3"))
menu.append((_("ext4 - experimental"), "ext4"))
menu.append((_("vfat - for USB flash memory"), "vfat"))
self.session.openWithCallback(self.choiceBoxFstypeCB, ChoiceBox, title=_("Choice filesystem."), list=menu)
def choiceBoxFstypeCB(self, choice):
if choice is None:
return
else:
partition = self.getCurrentPartition()
if partition is not None:
self.session.openWithCallback(self.deviceFormatCB, DeviceFormat, partition, choice[1])
else:
self.session.open(MessageBox, _("Partition info is not found."), MessageBox.TYPE_ERROR, timeout = 10)
# about mount funcs..
def doUmount(self, mountpoint, callback):
cmd = "umount %s"%mountpoint
print "[DeviceManager] cmd : %s"%cmd
os.system(cmd)
if not path.ismount(mountpoint):
devicemanagerconfig.updateConfigList()
else:
self.session.open(MessageBox, _("Can't umount %s. \nMaybe device or resource busy.")%mountpoint, MessageBox.TYPE_ERROR, timeout = 10)
callback()
def getDefaultMountPoint(self):
return self.defaultMountPoint
def getAutoMountPoint(self):
mountPoint = "/media/"+self.currDevice["model"]
mountPoint = mountPoint.replace(' ','-')
if path.ismount(mountPoint):
partnum = 2
while 1:
mountPoint_fix = mountPoint+str(partnum)
if not path.ismount(mountPoint_fix):
break
partnum +=1
mountPoint = mountPoint_fix
return mountPoint
def doMount(self, partition, mountpoint):
try:
# check mountpoint is in partition list.
if mountpoint != self.getDefaultMountPoint():
for p in harddiskmanager.partitions:
if p.mountpoint == mountpoint:
self.session.open(MessageBox, _("Can not use this mount point.(%s) \nPlease select another mount point.")%mountpoint, MessageBox.TYPE_ERROR, timeout = 10)
return
#
device = partition["partition"]
filesystem = partition["fstype"]
uuid = partition["uuid"]
if mountpoint.endswith("/"):
mountpoint = retval[:-1]
if mountpoint.find(' ') != -1:
mountpoint = mountpoint.replace(' ','-')
devpath = "/dev/"+device
if deviceinfo.isMounted(devpath, mountpoint):
print "[DeviceManager] '%s -> %s' is already mounted."%(devpath, mountpoint)
return
# check current device mounted on another mountpoint.
mp_list = deviceinfo.checkMountDev(devpath)
for mp in mp_list:
if mp != mountpoint and path.ismount(mp):
deviceinfo.umountByMountpoint(mp)
# check another device mounted on configmountpoint
devpath_list = deviceinfo.checkMountPoint(mountpoint)
for devpath_ in devpath_list:
if devpath_ != devpath:
self.session.open(MessageBox, _("Mount Failed!\nCurrent path is already mounted by \"%s\"")%devpath_list[0], MessageBox.TYPE_ERROR, timeout = 10)
return
# do mount
print "[DeviceManagerHotplugDevice] doMount"
if not path.exists(mountpoint):
os.system("mkdir %s"%mountpoint)
if path.exists(mountpoint):
if not path.ismount(mountpoint):
if filesystem == "ntfs":
cmd = "ntfs-3g %s %s"%(devpath, mountpoint)
elif filesystem is None:
cmd = "mount %s %s"%(devpath, mountpoint)
else:
cmd = "mount -t %s %s %s"%(filesystem, devpath, mountpoint)
print "[DeviceManager] cmd : %s"%cmd
self.DeviceManagerConsole.ePopen(cmd, self.doMountFinished, (devpath, mountpoint) )
except:
self.session.open(MessageBox, _("Mount Failed!\n(%s -> %s)")%(device, mountpoint), MessageBox.TYPE_ERROR, timeout = 10)
	def doMountFinished(self, result, retval, extra_args = None):
		"""Console callback after the mount command.

		extra_args is the (devpath, mountpoint) tuple passed to ePopen.
		On success: creates /media/hdd/movie when mounting the default HDD
		location, refreshes the partition list UI, and persists the current
		mount state to devicemanager.cfg.
		"""
		(devpath, mountpoint) = extra_args
		if retval == 0:
			# retval 0 only means the command exited cleanly; verify the
			# mount actually appears in /proc/mounts.
			if not deviceinfo.isMounted(devpath, mountpoint):
#				print "[DeviceManager] %s doMount failed!"%devpath
				self.session.open(MessageBox, _("Mount Failed!\n(%s -> %s)")%(devpath, mountpoint), MessageBox.TYPE_ERROR, timeout = 10)
				return
			else:
				# make movie directory
				if mountpoint == "/media/hdd":
					movieDir = mountpoint + "/movie"
					if not pathExists(movieDir):
						print "[DeviceManager] make dir %s"%movieDir
						os.makedirs(movieDir)
				self.showPartitionList()
		# update current mount state ,devicemanager.cfg
		devicemanagerconfig.updateConfigList()
def MountpointBrowserCB(self, retval = None):
if retval and retval is not None:
mountPoint = retval.strip().replace(' ','')
if retval.endswith("/"):
mountPoint = retval[:-1]
print "Mount point from MountpointBrowser : %s"%mountPoint
if not path.exists(mountPoint):
self.session.open(MessageBox, _("Mount Point is not writeable.\nPath : %s")%mountPoint, MessageBox.TYPE_ERROR, timeout = 10)
else:
self.doMount(self.currPartition, mountPoint)
# mount funcs end..
# Initializing Start...
class DeviceInit(Screen):
	"""Invisible helper screen that partitions and formats a block device.

	Flow (each step is driven by an eTimer / Console callback so the GUI
	stays responsive): confirmMessage -> InputPartitionSize_step1/2 ->
	choiceBoxFstype -> initInitializeConfirm -> initInitialize (sfdisk or
	parted) -> createFilesystem (one mkfs per partition) -> exitMessage.
	Devices >= 2.2 TB are partitioned with a GPT label via parted; smaller
	devices get an MBR label via sfdisk.
	"""
	skin = """<screen position="0,0" size="0,0"/>"""
	def __init__(self, session, device, devicesize):
		# device     : short block device name, e.g. "sda"
		# devicesize : device size in bytes
		Screen.__init__(self, session)
		self.session = session
		self.deviceInitConsole = Console()
		self.device = device
		self.devicesize = int(devicesize)
		self.inputbox_partitions = 1
		self.inputbox_partitionSizeList = []
		self.inputbox_partitionSizeTotal = int(self.devicesize/1024/1024)  # MB
		self.msgWaiting = None
		self.msgWaitingMkfs = None
		self.devicenumber = 0
		self.newpartitions = 0
		self.onLayoutFinish.append(self.timerStart)
		self.initStartTimer = eTimer()
		self.initStartTimer.callback.append(self.confirmMessage)
		self.createFSStartTimer = eTimer()
		self.createFSStartTimer.callback.append(self.createFilesystemStart)
		self.exitMessageTimer = eTimer()
		self.exitMessageTimer.callback.append(self.exitMessage)
		self.msg = ""
		self.fstype = None
		self.mkfs_cmd = ""
		self.doMkfsTimer = eTimer()
		self.doMkfsTimer.callback.append(self.doMkfs)
		self.doInitializeTimer = eTimer()
		self.doInitializeTimer.callback.append(self.doInitialize)
		self.partitionType = "MBR"
		self.maxPartNum = 4
		self.inputbox_partitionSizeRemain = self.inputbox_partitionSizeTotal
		self.unit = "MB"
	def timerStart(self):
		# Defer the confirmation dialog until after layout is finished.
		self.initStartTimer.start(100,True)
	def confirmMessage(self):
		"""Ask the user to confirm the destructive initialization."""
		message = _("Do you really want to initialize the device?\nAll data on the device will be lost!")
		self.session.openWithCallback(self.confirmed, MessageBox, message)
	def confirmed(self, ret):
		if ret:
			self.InitializeStart()
		else:
			self.exit()
	def exit(self, ret = True):
		self.close()
	def unmountAll(self, device):
		"""Unmount every mounted partition of 'device'; return True when none
		remain mounted afterwards."""
		mounts = file('/proc/mounts').read().split('\n')
		cmd = ""
		# umount all
		for line in mounts:
			if not line.startswith("/dev/" + device):
				continue
			cmd += "umount %s ;"% line.split()[0]
		print "[DeviceManager] %s"%cmd
		os.system(cmd)
		#recheck if umounted
		mounts = file('/proc/mounts').read().split('\n')
		for line in mounts:
			if line.startswith("/dev/" + device):
				return False
		return True
	def InitializeStart(self):
		"""Pick MBR or GPT depending on device size, then start size input."""
		if self.devicesize >= ( 2.2 * 1000 * 1000 * 1000 * 1000 ): # 2.2TB
			self.partitionType = "GPT"
			self.maxPartNum = 20
			self.inputbox_partitionSizeRemain = 100  # GPT sizes are entered in %
			self.unit = "%"
		self.InputPartitionSize_step1()
	def InputPartitionSize_step1(self):
		# Ask how many partitions to create (MBR: 1-4, GPT: 1-20).
		self.session.openWithCallback(self.InputPartitionSize_step1_CB, InputBox, title=_("How many partitions do you want?(1-%d)" % self.maxPartNum), text="1", maxSize=False, type=Input.NUMBER)
	def InputPartitionSize_step1_CB(self, ret):
		if ret is not None and int(ret) in range(1,self.maxPartNum+1): # MBR 1~4, GPT 1~20
			self.inputbox_partitions = int(ret)
			self.InputPartitionSize_step2()
		else:
			self.session.openWithCallback(self.exit, MessageBox, _("The number you entered is wrong!"), MessageBox.TYPE_ERROR, timeout = 10)
	def InputPartitionSize_step2(self):
		"""Ask for each partition's size in turn; the last partition gets
		whatever space remains."""
		current_partition = len(self.inputbox_partitionSizeList)+1
		if self.inputbox_partitionSizeRemain == 0:
			self.choiceBoxFstype()
		elif current_partition == self.inputbox_partitions:
			self.inputbox_partitionSizeList.append(str(self.inputbox_partitionSizeRemain))
			self.choiceBoxFstype()
		else:
			# Suggest an even split of the remaining space as the default.
			text = str(int(self.inputbox_partitionSizeRemain/(self.inputbox_partitions-len(self.inputbox_partitionSizeList) )))
			self.session.openWithCallback(self.InputPartitionSize_step2_CB, InputBox, title=_("Input size of partition %s.(Unit = %s, Max = %d %s)")%(current_partition, self.unit, self.inputbox_partitionSizeRemain, self.unit), text=text, maxSize=False, type=Input.NUMBER)
	def InputPartitionSize_step2_CB(self, ret):
		if ret is not None:
			# Re-prompt on invalid sizes (0 or more than the remaining space).
			if self.inputbox_partitionSizeRemain < int(ret) or int(ret) == 0:
				self.InputPartitionSize_step2()
			else:
				self.inputbox_partitionSizeList.append(str(ret))
				self.inputbox_partitionSizeRemain -= int(ret)
				self.InputPartitionSize_step2()
		else:
			self.session.openWithCallback(self.exit ,MessageBox, _("The number you entered is wrong!"), MessageBox.TYPE_ERROR, timeout = 10)
	def choiceBoxFstype(self):
		"""Let the user choose which filesystem to create on the partitions."""
		menu = []
		menu.append((_("ext2 - recommended for USB flash memory"), "ext2"))
		menu.append((_("ext3 - recommended for harddisks"), "ext3"))
		menu.append((_("ext4 - experimental"), "ext4"))
		menu.append((_("vfat - for USB flash memory"), "vfat"))
		self.session.openWithCallback(self.choiceBoxFstypeCB, ChoiceBox, title=_("Choice filesystem."), list=menu)
	def choiceBoxFstypeCB(self, choice):
		if choice is None:
			self.exit()
		else:
			self.fstype = choice[1]
			if self.fstype not in ["ext2", "ext3", "ext4", "vfat"]:
				self.exit()
			else:
				self.initInitializeConfirm()
	def initInitializeConfirm(self):
		"""Show a summary (sizes + filesystem) and ask for final confirmation."""
#		print self.inputbox_partitionSizeList
		partitionsInfo = ""
		for index in range(len(self.inputbox_partitionSizeList)):
			print "partition %d : %s %s"%(index+1, str(self.inputbox_partitionSizeList[index]), self.unit)
			partitionsInfo += "partition %d : %s %s\n"%(index+1, str(self.inputbox_partitionSizeList[index]), self.unit)
		partitionsInfo += "filesystem type : %s"%(self.fstype)
		self.session.openWithCallback(self.initInitializeConfirmCB, MessageBoxConfirm, _("%s\nStart Device Inititlization?") % partitionsInfo , MessageBox.TYPE_YESNO)
	def initInitializeConfirmCB(self,ret):
		if ret:
			self.initInitialize()
		else:
			self.exit()
	def initInitialize(self):
		"""Unmount the device and start the partitioning step behind a
		non-interruptible 'please wait' message box."""
		if not self.unmountAll(self.device):
			self.session.openWithCallback(self.exit, MessageBox, _("umounting failed!Maybe some files in mount point are open"), MessageBox.TYPE_ERROR, timeout = 10)
		else:
			msg = _("InitInitializing, please wait ...")
			msg += _("\nDevice : %s")%self.device
			msg += _("\nSize : %s MB\n")%self.inputbox_partitionSizeTotal
			for index in range(len(self.inputbox_partitionSizeList)):
				msg += _("\npartition %d : %s %s")%(index+1, str(self.inputbox_partitionSizeList[index]), self.unit)
			self.msgWaiting = self.session.openWithCallback(self.msgWaitingCB, MessageBox_2, msg, type = MessageBox.TYPE_INFO, enable_input = False)
			self.doInitializeTimer.start(500,True)
	def doInitialize(self):
		"""Build and run the sfdisk (MBR) or parted (GPT) command line."""
		def CheckPartedVer():
			# Parse "parted (GNU parted) X.Y" from --version output; returns
			# the version as a float, or 0 when it cannot be determined.
			cmd = 'parted --version'
			lines = os.popen(cmd).readlines()
			for l in lines:
				if l.find("parted (GNU parted)") != -1:
					ver = l.split()[3].strip()
					break
			try:
				# NOTE: 'ver' is unbound when no line matched; the except
				# below also covers that NameError and reports 0.
				ver = float(ver)
			except:
				print "[CheckPartedVer] check parted version Failed!"
				return 0
			return ver
		partitions = len(self.inputbox_partitionSizeList) # get num of partition
		set = ""
		if self.partitionType == "MBR":
			if partitions == 1:
				# Single partition: start at sector 8, use the whole device.
				cmd = 'printf "8,\n;0,0\n;0,0\n;0,0\ny\n" | sfdisk -f -uS /dev/' + self.device
			else:
				# Feed one size line per partition (MB units) to sfdisk.
				for p in range(4):
					if partitions > p+1:
						set += ",%s\n"%(self.inputbox_partitionSizeList[p])
					else:
						set +=";\n"
				set+="y\n"
				cmd = 'printf "%s" | sfdisk -f -uM /dev/%s'%(set,self.device)
		elif self.partitionType == "GPT": # partition type is GPT
			setAlign = ""
			partedVer = CheckPartedVer()
			if partedVer >= 2.1: # align option is supported in version 2.1 or later
				setAlign = "--align optimal"
			if partitions == 1:
				cmd = 'parted %s /dev/%s --script mklabel gpt mkpart disk ext2 0%% 100%%' % (setAlign, self.device)
			else: # has multiple partitions
				# Sizes were entered in percent; accumulate start/end offsets.
				p_current = 0
				for p in range(partitions):
					if p == 0:
						p_start = p_current
						p_end = int( (long(self.inputbox_partitionSizeList[p]) * 100) / 100 )
						p_current = p_end
					elif p > 0 and partitions > (p + 1):
						p_start = p_current
						p_end = int( (long(self.inputbox_partitionSizeList[p]) * 100) / 100 )+ p_start
						p_current = p_end
					elif partitions == (p + 1):
						# Last partition always ends at 100%.
						p_start = p_current
						p_end = 100
					if p_start == p_end:
						p_end +=1
					if p_end > 100:
						continue
					set += 'mkpart disk%d ext2 %d%% %d%% ' % (p + 1, p_start, p_end)
				cmd = 'parted %s /dev/%s --script mklabel gpt %s' % (setAlign, self.device, set)
		else:
			errorMsg = "Invalid partitioning type"
			self.msgWaiting.run_close(False, errorMsg)
			return
		self.deviceInitConsole.ePopen(cmd, self.initInitializeFinished)
	def initInitializeFinished(self, result, retval, extra_args = None):
		"""After partitioning: re-read the partition table (MBR) or just wait
		(GPT), then continue with the unmount step."""
		if retval == 0:
			if self.partitionType == "MBR":
				cmd = "sfdisk -R /dev/%s ; sleep 5" % (self.device)
			else: # is GPT
				cmd = "sleep 5"
			self.deviceInitConsole.ePopen(cmd, self.initInitializingRefreshFinished)
		else:
			errorMsg = "initInitializing device Error at /dev/%s"%self.device
			self.msgWaiting.run_close(False, errorMsg)
	def initInitializingRefreshFinished(self, result, retval, extra_args = None):
		# Unmount any partitions auto-mounted after the table re-read.
		cmd = "/bin/umount /dev/%s*" % (self.device)
		self.deviceInitConsole.ePopen(cmd, self.initInitializingUmountFinished)
	def initInitializingUmountFinished(self, result, retval, extra_args = None):
		"""Count the device's partitions in /proc/partitions and verify the
		kernel sees as many as were requested."""
		partitions = open("/proc/partitions")
		self.devicenumber = 0
		self.newpartitions = 0
		for part in partitions:
			res = re.sub("\s+", " ", part).strip().split(" ")
			if res and len(res) == 4 and res[3][:3] == self.device:
				if len(res[3]) > 3 and res[3][:2] == "sd":
					self.newpartitions += 1
		partitions.close()
		partNum = len(self.inputbox_partitionSizeList) # get num of partition
		if self.newpartitions != partNum:
			errorMsg = "Partitioning device Error at /dev/%s"%self.device
			self.msgWaiting.run_close(False, errorMsg)
		else:
			# Success: closing the wait box triggers msgWaitingCB, which then
			# starts the filesystem-creation phase.
			self.msgWaiting.run_close(True)
#		self.createFilesystem(self.newpartitions)
	def createFilesystem(self, newpartitions):
		"""Build the mkfs command for the next partition (self.devicenumber)
		and run it behind a wait message box."""
		self.devicenumber = self.devicenumber + 1
		fulldevicename = "/dev/" + self.device + str(self.devicenumber)
		shortdevicename = self.device + str(self.devicenumber)
		# get partition size
		partitions = open("/proc/partitions")
		for part in partitions:
			res = re.sub("\s+", " ", part).strip().split(" ")
			if res and len(res) == 4:
				if res[3] == shortdevicename:
					partitionsize = int(res[2])  # size in 1K blocks
					break
		partitions.close()
		if self.fstype == "ext4":
			cmd = "/sbin/mkfs.ext4 -F "
			if partitionsize > 2 * 1024 * 1024: # 2GB
				cmd += "-T largefile "
			cmd += "-O extent,flex_bg,large_file,uninit_bg -m1 " + fulldevicename
		elif self.fstype == "ext3":
			cmd = "/sbin/mkfs.ext3 -F "
			if partitionsize > 2 * 1024 * 1024:
				cmd += "-T largefile "
			cmd += "-m0 " + fulldevicename
		elif self.fstype == "ext2":
			cmd = "/sbin/mkfs.ext2 -F "
			if partitionsize > 2 * 1024 * 1024:
				cmd += "-T largefile "
			cmd += "-m0 " + fulldevicename
		elif self.fstype == "vfat":
			if partitionsize > 4 * 1024 * 1024 * 1024:
				cmd = "/usr/sbin/mkfs.vfat -I -S4096 " + fulldevicename
			else:
				cmd = "/usr/sbin/mkfs.vfat -I " + fulldevicename
		else:
			# Unknown filesystem: report failure through the normal callback.
			self.createFilesystemFinished(None, -1, (self.device, fulldevicename))
			return
		msg = _("Create filesystem, please wait ...")
		msg += _("\nPartition : %s") % (fulldevicename)
		msg += _("\nFilesystem : %s") % (self.fstype)
		msg += _("\nDisk Size : %s MB") % (self.inputbox_partitionSizeTotal)
		msg += _("\nPartition Size : %d %s\n") % (int(self.inputbox_partitionSizeList[self.devicenumber-1]), self.unit)
		self.msgWaitingMkfs = self.session.openWithCallback(self.msgWaitingMkfsCB, MessageBox_2, msg, type = MessageBox.TYPE_INFO, enable_input = False)
		self.mkfs_cmd = cmd
		self.doMkfsTimer.start(500,True)
	def doMkfs(self):
		fulldevicename = "/dev/" + self.device + str(self.devicenumber)
		self.deviceInitConsole.ePopen(self.mkfs_cmd, self.createFilesystemFinished, (self.device, fulldevicename))
	def createFilesystemFinished(self, result, retval, extra_args = None):
		device = extra_args[0]
		fulldevicename = extra_args[1]
		if retval == 0:
			self.msgWaitingMkfs.run_close(True)
		else:
			errorMsg = _("Creating filesystem Error")
			if fulldevicename is not None:
				errorMsg += _(" at /dev/%s")%fulldevicename
			self.msgWaitingMkfs.run_close(False, errorMsg)
	def createFilesystemStart(self):
		self.createFilesystem(self.newpartitions)
	def msgWaitingCB(self, ret, msg=""):
		# Callback of the partitioning wait box: on success, move on to
		# filesystem creation; otherwise remember the error and exit.
		if ret:
			self.createFSStartTimer.start(100,True)
		else:
			self.success = False
			self.msg = msg
			self.exitMessageTimer.start(100,True)
	def msgWaitingMkfsCB(self, ret, msg=""):
		# Callback of the mkfs wait box: loop until every new partition has
		# been formatted, then report the overall result.
		if self.devicenumber < self.newpartitions:
			self.createFSStartTimer.start(100,True)
		else:
			if ret == True:
				self.success = True
				self.msg = _("Device Initialization finished sucessfully!")
				self.updateDeviceInfo()
				self.exitMessageTimer.start(100,True)
			else:
				self.success = False
				self.msg = msg
				self.exitMessageTimer.start(100,True)
	def exitMessage(self):
		# Show the final result message, then close this screen.
		if self.success:
			self.session.openWithCallback(self.exit, MessageBox, self.msg, MessageBox.TYPE_INFO, timeout = 10)
		else:
			self.session.openWithCallback(self.exit, MessageBox, self.msg, MessageBox.TYPE_ERROR, timeout = 10)
	def updateDeviceInfo(self):
		# update devicemanager configs
		devicemanagerconfig.updateConfigList()
# Initializing end
# device check start..
class DeviceCheck(Screen):
	"""Invisible screen that runs a filesystem check on an ext* partition.

	partition : dict with keys "partition", "mountpoint", "fstype", "size".
	Sequence: optional confirmation (for filesystems > 16 GB) -> unmount ->
	"fsck.<fstype> -f -p" -> show result -> remount at the previous mount
	point -> close.  All shell commands run asynchronously via Console.
	"""
	skin = """<screen position="0,0" size="0,0"/>"""
	def __init__(self, session, partition):
		Screen.__init__(self, session)
		self.session = session
		self.deviceCheckConsole = Console()
		self.partition = partition
		self.onLayoutFinish.append(self.timerStart)
		self.checkStartTimer = eTimer()
		self.checkStartTimer.callback.append(self.confirmMessage)
		self.umountTimer = eTimer()
		self.umountTimer.callback.append(self.doUnmount)
	def timerStart(self):
		self.checkStartTimer.start(100,True)
	def confirmMessage(self):
		# Large filesystems take long to check; ask before proceeding.
		fssize = self.partition["size"]
		if long(fssize) > 1024*1024*1024*16:
			message = _("Do you really want to check the filesystem?\nThis could take lots of time!")
			self.session.openWithCallback(self.confirmed, MessageBox, message)
		else:
			self.deviceCheckStart()
	def confirmed(self, ret):
		print "confirmed : ",ret
		if ret:
			self.deviceCheckStart()
		else:
			self.exit()
	def deviceCheckStart(self):
		"""Open the wait box and schedule the unmount; only ext* filesystems
		are checked, anything else exits immediately."""
		print "deviceCheckStart "
		print "partition : ", self.partition
		device = self.partition["partition"]
		fstype = self.partition["fstype"]
		fssize = self.partition["size"]
		if device is not None and fstype.startswith("ext"):
			msg = _("Check filesystem, please wait ...")
			msg += _("\nDevice : /dev/%s")%(device)
			msg += _("\nFilesystem : %s")%(fstype)
			self.msgWaiting = self.session.openWithCallback(self.msgWaitingCB, MessageBox_2, msg, type = MessageBox.TYPE_INFO, enable_input = False)
			self.umountTimer.start(500,True)
		else:
			self.exit()
	def doUnmount(self):
		# A partition without a mount point needs no unmount; skip straight
		# to the fsck step with a synthetic "success" result.
		device = self.partition["partition"]
		mountpoint = self.partition["mountpoint"]
		fstype = self.partition["fstype"]
		if mountpoint != "":
			self.doUmountFsck(device, mountpoint, fstype)
		else:
			self.umountFsckFinished("NORESULT", 0, (device, mountpoint, fstype))
	def doUmountFsck(self, device, mountpoint, fstype):
		cmd = "umount /dev/%s" % device
		self.deviceCheckConsole.ePopen(cmd, self.umountFsckFinished, (device, mountpoint, fstype))
	def umountFsckFinished(self, result, retval, extra_args = None):
		"""After unmounting: run fsck, or report why the unmount failed."""
		device = extra_args[0]
		mountpoint = extra_args[1]
		fstype = extra_args[2]
		if retval == 0:
			cmd = "fsck." + fstype + " -f -p /dev/" + device
			self.deviceCheckConsole.ePopen(cmd, self.fsckFinished, extra_args)
		else:
			errorMsg = _("Can not umount device /dev/%s.\nMaybe some files of the filesystem are open")%device
			self.msgWaiting.run_close(False,errorMsg)
	def fsckFinished(self, result, retval, extra_args = None):
		# Close the wait box with the fsck outcome; msgWaitingCB then
		# handles the result popup and the remount.
		device = extra_args[0]
		mountpoint = extra_args[1]
		if retval == 0:
			text = _("Filesystem check finished sucessfully")
			self.msgWaiting.run_close(True, text)
		else:
			text = _("Error checking disk. The disk or filesystem may be damaged")
			self.msgWaiting.run_close(False, text)
	def msgWaitingCB(self, ret, msg):
		"""Show the result message, then remount the partition at its
		previous mount point (regardless of the check result)."""
		if ret:
			self.session.open(MessageBox, msg, MessageBox.TYPE_INFO, timeout = 10)
		else:
			self.session.open(MessageBox, msg, MessageBox.TYPE_ERROR, timeout = 10)
		partition = self.partition["partition"]
		mountpoint = self.partition["mountpoint"]
		fstype = self.partition["fstype"]
		if mountpoint != "":
			if fstype == "ntfs":
				cmd = "ntfs-3g /dev/" + partition + " " + mountpoint
			else:
				cmd = "mount /dev/" + partition + " " + mountpoint
			self.deviceCheckConsole.ePopen(cmd, self.mountPartitionFinished)
		else:
			self.exit()
	def mountPartitionFinished(self, result, retval, extra_args = None):
		self.exit()
	def exit(self):
		self.close()
#device check end
#device format start
class DeviceFormat(Screen):
	"""Invisible screen that reformats a partition with a new filesystem.

	partition : dict with keys "partition", "mountpoint", "fstype", "size".
	newfstype : target filesystem ("ext2"/"ext3"/"ext4"/"vfat").
	Sequence: disable hotplug handling -> unmount -> (if the filesystem type
	changes) sfdisk --change-id + table re-read -> mkfs -> remount whatever
	was unmounted -> re-enable hotplug.  All commands run via Console.
	"""
	skin = """<screen position="0,0" size="0,0"/>"""
	def __init__(self, session, partition, newfstype):
		Screen.__init__(self, session)
		self.session = session
		self.deviceFormatConsole = Console()
		self.partition = partition
		self.newfstype = newfstype
		self.unmountedList = []  # [devpath, mountpoint] pairs to remount later
		self.onLayoutFinish.append(self.timerStart)
		self.formatStartTimer = eTimer()
		self.formatStartTimer.callback.append(self.DeviceFormatStart)
		self.setHotplugDisabled = False
		self.umountTimer = eTimer()
		self.umountTimer.callback.append(self.doUnmount)
	def timerStart(self):
		self.formatStartTimer.start(100,True)
	def DeviceFormatStart(self):
		# Suspend hotplug handling so the format is not interrupted by
		# add/remove events; re-enabled in exit().
		devicemanagerhotplug.setHotplugActive(False)
		self.setHotplugDisabled = True
		print "DeviceFormatStart : ", self.partition,
		print "Filesystem : ",self.newfstype
		device = self.partition["partition"]
		devicepath = "/dev/"+device
		fssize = self.partition["size"]
		newfstype = self.newfstype
		msg = _("Format filesystem, please wait ...")
		msg += _("\nDevice : %s")%(devicepath)
		msg += _("\nFilesystem : %s")%(newfstype)
		msg += _("\nSize : %s")%(byteConversion(fssize))
		self.msgWaiting = self.session.openWithCallback(self.msgWaitingCB, MessageBox_2, msg, type = MessageBox_2.TYPE_INFO, enable_input = False, msgBoxID = None)
		self.umountTimer.start(500,True)
	def doUnmount(self):
		mountpoint = self.partition["mountpoint"]
		if mountpoint != "":
			self.doumountPartition()
		else:
			# Nothing mounted: continue with a synthetic success result.
			self.umountPartitionFinished("NORESULT", 0)
	def doumountPartition(self):
		"""Unmount the target.  When the filesystem type changes, the whole
		device's partitions are unmounted (the partition table will be
		rewritten); otherwise only this partition."""
		oldfstype = self.partition["fstype"]
		newfstype = self.newfstype
		if newfstype == oldfstype:
			device = self.partition["partition"]
		else:
			device = self.partition["partition"][:3]
		cmd = ""
		mounts = file('/proc/mounts','r')
		for line in mounts.readlines():
			if line.startswith("/dev/%s"%device):
				cmd += "umount %s;"%line.split()[0]
				# Remember what we unmounted so it can be remounted later.
				self.unmountedList.append([line.split()[0], line.split()[1]])
		self.deviceFormatConsole.ePopen(cmd, self.umountPartitionFinished)
	def umountPartitionFinished(self, result, retval, extra_args = None):
		"""After unmounting: change the partition ID when the filesystem
		family changes (83 = Linux, c = W95 FAT32)."""
		partition = self.partition["partition"]
		oldfstype = self.partition["fstype"]
		newfstype = self.newfstype
		if retval == 0:
			if oldfstype == newfstype:
				self.changePartitionIDFinished("NORESULT", 0)
			else:
				cmd = "sfdisk --change-id /dev/%s %s" % (partition[:3], partition[3:])
				if newfstype[:3] == "ext":
					cmd += " 83"
				else:
					cmd += " c"
				self.deviceFormatConsole.ePopen(cmd, self.changePartitionIDFinished)
		else:
			errorMsg = _("Can not umount device /dev/%s.\nMaybe some files of the filesystem are open")%partition[:3]
			self.msgWaiting.run_close(False,errorMsg)
	def changePartitionIDFinished(self, result, retval, extra_args = None):
		"""After the ID change: re-read the partition table when needed."""
		device = self.partition["partition"][:3]
		mountpoint = self.partition["mountpoint"]
		oldfstype = self.partition["fstype"]
		newfstype = self.newfstype
		if retval == 0:
			if oldfstype == newfstype:
				self.refreshPartitionFinished("NORESULT", 0)
			else:
				cmd = "sfdisk -R /dev/%s; sleep 5"%(device)
				self.deviceFormatConsole.ePopen(cmd, self.refreshPartitionFinished)
		else:
			# sfdisk refuses GPT disks and points at GNU Parted; treat that
			# as non-fatal and continue without the ID change.
			if result and result.find("Use GNU Parted") > 0:
				print "[DeviceManager] /dev/%s use GNU Parted!" % device
				self.refreshPartitionFinished("NORESULT", 0)
			else:
				errorMsg = _("Can not change the partition ID for %s")%device
				self.msgWaiting.run_close(False,errorMsg)
	def refreshPartitionFinished(self, result, retval, extra_args = None):
		"""Build and run the mkfs command for the new filesystem."""
		print "refreshPartitionFinished!"
		partition = self.partition["partition"]
		mountpoint = self.partition["mountpoint"]
		size = int(self.partition["size"])/1024/1024  # MB
		oldfstype = self.partition["fstype"]
		newfstype = self.newfstype
		if retval == 0:
			if newfstype == "ext4":
				cmd = "/sbin/mkfs.ext4 -F "
				if size > 2 * 1024:
					cmd += "-T largefile "
				cmd += "-O extent,flex_bg,large_file,uninit_bg -m1 /dev/" + partition
			elif newfstype == "ext3":
				cmd = "/sbin/mkfs.ext3 -F "
				if size > 2 * 1024:
					cmd += "-T largefile "
				cmd += "-m0 /dev/" + partition
			elif newfstype == "ext2":
				cmd = "/sbin/mkfs.ext2 -F "
				if size > 2 * 1024:
					cmd += "-T largefile "
				cmd += "-m0 /dev/" + partition
			elif newfstype == "vfat":
				if size > 4 * 1024 * 1024:
					cmd = "/usr/sbin/mkfs.vfat -I -S4096 /dev/" + partition
				else:
					cmd = "/usr/sbin/mkfs.vfat -I /dev/" + partition
			self.deviceFormatConsole.ePopen(cmd, self.mkfsFinished)
		else:
			errorMsg = _("Can not format device /dev/%s.\nrefresh partition information failed!")%partition
			self.msgWaiting.run_close(False,errorMsg)
	def mkfsFinished(self, result, retval, extra_args = None):
		"""After mkfs: remount everything recorded in self.unmountedList."""
		print "mkfsFinished!"
		partition = self.partition["partition"]
		if retval == 0:
			cmd = ""
			# NOTE(review): when unmountedList is empty, doMountFinished is
			# invoked directly AND again via the (empty) ePopen below —
			# looks like a double callback; confirm before changing.
			if len(self.unmountedList) == 0:
				self.doMountFinished("NORESULT",0)
			for x in self.unmountedList:
				cmd += "mount %s %s;"%(x[0], x[1])
			self.deviceFormatConsole.ePopen(cmd, self.doMountFinished)
		else:
			text = _("Make filesystem Error /dev/%s.\nPlease check your device.")%partition
			self.msgWaiting.run_close(False, text)
	def doMountFinished(self, result, retval, extra_args = None):
		print "doMountFinished!"
		text = _("Format finished sucessfully.")
		self.msgWaiting.run_close(True, text)
	def msgWaitingCB(self, ret, msg):
		# Final result popup; exit() re-enables hotplug handling.
		if ret:
			self.session.openWithCallback(self.exit, MessageBox, msg, MessageBox.TYPE_INFO, timeout = 10)
		else:
			self.session.openWithCallback(self.exit, MessageBox, msg, MessageBox.TYPE_ERROR, timeout = 10)
	def exit(self, ret):
		if self.setHotplugDisabled == True:
			devicemanagerhotplug.setHotplugActive(True)
			self.setHotplugDisabled = False
		self.close()
#device format end
class DeviceInfo():
def __init__(self):
self.blockDeviceList = []
def getBlockDevices(self):
return self.blockDeviceList
def refresh(self):
self.blockDeviceList = []
self.getBlockDeviceList()
def getBlockDeviceList(self):
print "get block device Infomations..."
for blockdev in listdir("/sys/block"):
(error, blacklisted, removable, partitions, size, model, vendor) = self.getBlockDeviceInfo(blockdev)
if not blacklisted and not error:
# print "%s : error %s, blacklisted %s, removable %s, partitions %s, size %s"%(blockdev, error, blacklisted, removable, partitions, size)
blockDevice = {}
blockDevice["blockdev"] = blockdev # str
blockDevice["removable"] = removable # bool [True, False]
blockDevice["partitions"] = partitions # list
blockDevice["size"] = size # str
blockDevice["model"] = model # str
blockDevice["vendor"] = vendor # str
self.blockDeviceList.append(blockDevice)
def SortPartList(self, partList):
length = len(partList)-1
sorted = False
while sorted is False:
sorted = True
for idx in range(length):
if int(partList[idx][3:]) > int(partList[idx+1][3:]):
sorted = False
partList[idx] , partList[idx+1] = partList[idx+1], partList[idx]
def getBlockDeviceInfo(self, blockdev):
devpath = "/sys/block/" + blockdev
error = False
removable = False
blacklisted = False
partitions = []
size =""
model = ""
vendor = ""
try:
dev = int(readFile(devpath + "/dev").split(':')[0])
if dev in (7, 31) or blockdev[0:2] != 'sd': # 7: loop, 31 : mtdblock
blacklisted = True
return error, blacklisted, removable, partitions, size, model, vendor
removable = bool(int(readFile(devpath + "/removable")))
size = str(int(readFile(devpath + "/size").strip())*512)
model = readFile(devpath + "/device/model")
vendor = readFile(devpath + "/device/vendor")
for partition in listdir(devpath):
if partition[:len(blockdev)] != blockdev:
continue
partitions.append(partition)
self.SortPartList(partitions)
except IOError:
error = True
return error, blacklisted, removable, partitions, size, model, vendor
def getPartitionInfo(self, partition):
mountPoint = self.getPartitionMountpoint(partition)
(uuid , fsType) = self.getPartitionBlkidInfo(partition)
size_total = self.getPartitionSize(partition)
size_free = ""
if mountPoint != "":
size_free = self.getPartitionFree(mountPoint)
partitionInfo = {}
partitionInfo["partition"] = partition
partitionInfo["mountpoint"] = mountPoint
partitionInfo["uuid"] = uuid
partitionInfo["fstype"] = fsType
partitionInfo["size"] = size_total
partitionInfo["free"] = size_free
return partitionInfo
def getPartitionMountpoint(self, partition):
mounts = file('/proc/mounts').read().split('\n')
for x in mounts:
if not x.startswith('/'):
continue
devpath, mountpoint, = x.split()[:2]
if mountpoint.startswith('/autofs'):
continue
if path.basename(devpath) == partition:
return mountpoint
return ""
def getPartitionBlkidInfo(self, partition):
parttionDev = "/dev/"+str(partition)
uuid = ""
partitionType = ""
cmd = "blkid -c /dev/null "+str(parttionDev)
try:
line = popen(cmd).readline().strip()
if not line.startswith(parttionDev):
return (uuid, partitionType)
# print "Blikd %s : %s"%(parttionDev, line)
if line.find(" UUID=") != -1:
uuid = line.split(" UUID=")[1].split(' ')[0]
if line.find(" TYPE=") != -1:
partitionType = line.split(" TYPE=")[1].split(' ')[0].strip('"')
except:
print "get blkid info error (%s)"%cmd
return (uuid, partitionType)
def getPartitionSize(self, partition):
devpath = "/sys/block/%s/%s"%( str(partition[:3]), str(partition) )
try:
size = readFile(devpath + "/size")
return str(int(size)*512)
except:
return ""
def getPartitionFree(self, mountPoint):
try:
stat = statvfs(mountPoint)
size_free = stat.f_bfree*stat.f_bsize
return size_free
except:
return ""
def checkMountPoint(self, check_mountpoint):
res = []
try:
mounts = file('/proc/mounts').read().split('\n')
for x in mounts:
if not x.startswith('/'):
continue
devpath, mountpoint = x.split()[:2]
if mountpoint == check_mountpoint:
res.append(devpath)
except:
pass
return res
def checkMountDev(self, device):
res = []
try:
mounts = file('/proc/mounts').read().split('\n')
for x in mounts:
if not x.startswith('/'):
continue
devpath, mountpoint = x.split()[:2]
if devpath == device:
res.append(mountpoint)
except:
pass
return res
def isMounted(self, devpath, mountpoint):
try:
mounts = file('/proc/mounts').read().split('\n')
for x in mounts:
if not x.startswith('/'):
continue
_devpath, _mountpoint = x.split()[:2]
if devpath == _devpath and mountpoint == _mountpoint:
return True
except:
pass
return False
def isMountable(self, partition):
autofsPath = "/autofs/"+partition.device
mountable = False
try:
os.listdir(autofsPath)
mountable = True
except:
pass
return mountable
def isFstabAutoMounted(self, uuid, devpath, mountpoint):
# print " >> isFstabMounted, uuid : %s, devpath : %s, mountpoint : %s"%(uuid, devpath, mountpoint)
if mountpoint[-1] == '/':
mountpoint = mountpoint[:-1]
data = file('/etc/fstab').read().split('\n')
for line in data:
if not line.startswith('/'):
continue
dev, mp, ms = line.split()[0:3]
if uuid is not None and dev.startswith('UUID'):
if dev.split('=')[1] == uuid.strip("\"") and mp == mountpoint and ms == 'auto':
# print " >> line : ", line
return True
elif dev == devpath and mp == mountpoint and ms == 'auto':
# print " >> line : ", line
return True
return False
def umountByMountpoint(self, mountpoint):
if mountpoint is None:
return False
try:
if path.ismount(mountpoint):
cmd = "umount " + mountpoint
print "[DeviceManager] ", cmd
os.system(cmd)
except:
print "Umount by mountpoint failed!"
if not path.ismount(mountpoint):
return True
return False
def umountByDevpath(self, devpath):
cmd = "umount " + devpath
print "[DeviceManager] ", cmd
os.system(cmd)
deviceinfo = DeviceInfo()
class MountpointBrowser(Screen):
	"""Directory browser rooted at /media used to pick a mount point.

	Closes with the selected directory path (str), or False on cancel.
	Keys: red = cancel, green/OK = select, yellow = create directory,
	blue = delete (empty) directory.
	"""
	skin="""
		<screen name="MountpointBrowser" position="center,120" size="670,500" title="Select mountpoint">
			<ePixmap pixmap="skin_default/buttons/red.png" position="20,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="180,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="340,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/blue.png" position="500,0" size="140,40" alphatest="on" />
			<widget source="key_red" render = "Label" position="20,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
			<widget source="key_green" render = "Label" position="180,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
			<widget source="key_yellow" render = "Label" position="340,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
			<widget source="key_blue" render = "Label" position="500,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#18188b" transparent="1" />
			<eLabel position="10,50" size="650,1" backgroundColor="#b3b3b9"/>
			<widget name="filelist" position="10,60" size="650,440" itemHeight="30" scrollbarMode="showOnDemand"/>
		</screen>
	"""
	def __init__(self, session):
		Screen.__init__(self, session)
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Select"))
		self["key_yellow"] = StaticText(_("Create directory"))
		# Wrapped in _() for translation, consistent with the other keys
		# (was the only untranslated label).
		self["key_blue"] = StaticText(_("Delete directory"))
		directory = "/media/"
		inhibitDirs = ["/autofs", "/mnt", "/hdd", "/bin", "/boot", "/dev", "/etc", "/home", "/lib", "/proc", "/sbin", "/share", "/sys", "/tmp", "/usr", "/var"]
		self.filelist = FileList(directory, matchingPattern="", inhibitDirs = inhibitDirs)
		self["filelist"] = self.filelist
		self["shortcuts"] = ActionMap(["ColorActions"],
			{
			"red": self.exit,
			"green": self.select,
			"yellow": self.createDirectory,
			"blue": self.deleteDirectory,
			}, -2)
		self["OkCancelActions"] = ActionMap(["OkCancelActions"],
			{
			"cancel": self.exit,
			"ok": self.ok,
			}, -2)
	def ok(self):
		# OK descends into the highlighted directory.
		if self.filelist.canDescent():
			self.filelist.descent()
	def select(self):
		"""Green: descend into the highlighted subdirectory and close with
		the resulting directory, or close with the current selection."""
		if self["filelist"].getCurrentDirectory() is not None:
			if self.filelist.canDescent() and self["filelist"].getFilename() and self["filelist"].getFilename().startswith(self["filelist"].getCurrentDirectory()):
				self.filelist.descent()
				currDir = self["filelist"].getCurrentDirectory()
				self.close(currDir)
		else:
			self.close(self["filelist"].getFilename())
	def createDirectory(self):
		self.session.openWithCallback(self.createDirectoryCB, VirtualKeyBoard, title = (_("Input mount point path.")), text = "")
	def createDirectoryCB(self, retval = None):
		"""Create the entered directory below the current one and refresh."""
		newdir=None
		try:
			if retval is not None:
				newdir = self["filelist"].getCurrentDirectory()+'/'+retval
				if not path.exists(newdir):
					# Quote the path (like deleteDirectory does) so names
					# with spaces or shell metacharacters work.
					os.system("mkdir '%s'"%newdir)
				self.filelist.refresh()
		except:
			if newdir:
				self.session.open(MessageBox, _("Create directory failed!\n%s")%newdir, MessageBox.TYPE_ERROR, timeout = 10)
	def deleteDirectory(self):
		"""Delete the highlighted (empty) directory and refresh."""
		delDir=None
		try:
			if self["filelist"].getCurrentDirectory() is not None:
				if self.filelist.canDescent() and self["filelist"].getFilename() and self["filelist"].getFilename().startswith(self["filelist"].getCurrentDirectory()):
					delDir = self["filelist"].getFilename()
					if path.exists(delDir):
						os.system("rmdir '%s'"%delDir)
					if path.exists(delDir):
						self.session.open(MessageBox, _("Delete directory failed!\nMaybe directory is not empty."), MessageBox.TYPE_ERROR, timeout = 10)
					self.filelist.refresh()
		except:
			if delDir:
				# Was "%newdir" — an undefined name in this method, so the
				# error popup itself raised a NameError.
				self.session.open(MessageBox, _("Delete directory failed!\n%s")%delDir, MessageBox.TYPE_ERROR, timeout = 10)
	def exit(self):
		self.close(False)
class MessageBoxConfirm(MessageBox):
	"""MessageBox variant whose skin applet resizes and re-centers the
	window to fit the (multi-line) text — used for the device
	initialization confirmation.  Behavior is inherited from MessageBox;
	only the skin differs.
	"""
	skin = """
		<screen position="center,center" size="620,10" title="Message">
			<widget name="text" position="65,8" size="420,0" font="Regular;20" />
			<widget name="ErrorPixmap" pixmap="skin_default/icons/input_error.png" position="5,5" size="53,53" alphatest="blend" />
			<widget name="QuestionPixmap" pixmap="skin_default/icons/input_question.png" position="5,5" size="53,53" alphatest="blend" />
			<widget name="InfoPixmap" pixmap="skin_default/icons/input_info.png" position="5,5" size="53,53" alphatest="blend" />
			<widget name="list" position="100,100" size="380,375" transparent="1" />
			<applet type="onLayoutFinish">
				# this should be factored out into some helper code, but currently demonstrates applets.
				from enigma import eSize, ePoint
				orgwidth = self.instance.size().width()
				orgheight = self.instance.size().height()
				orgpos = self.instance.position()
				textsize = self["text"].getSize()
				# y size still must be fixed in font stuff...
				textsize = (textsize[0] + 50, textsize[1] + 50)
				offset = 0
				if self.type == self.TYPE_YESNO:
					offset = 60
				wsizex = textsize[0] + 60
				wsizey = textsize[1] + offset
				if (280 > wsizex):
					wsizex = 280
				wsize = (wsizex, wsizey)
				# resize
				self.instance.resize(eSize(*wsize))
				# resize label
				self["text"].instance.resize(eSize(*textsize))
				# move list
				listsize = (wsizex, 50)
				self["list"].instance.move(ePoint(0, textsize[1]))
				self["list"].instance.resize(eSize(*listsize))
				# center window
				newwidth = wsize[0]
				newheight = wsize[1]
				self.instance.move(ePoint(orgpos.x() + (orgwidth - newwidth)/2, orgpos.y() + (orgheight - newheight)/2))
			</applet>
		</screen>
	"""
dmconfigfile = resolveFilename(SCOPE_PLUGINS, "SystemPlugins/DeviceManager/devicemanager.cfg")
class DeviceManagerConfig():
    """Persists the uuid -> mountpoint mapping in devicemanager.cfg.

    Each line of the config file has the form "<uuid>:<mountpoint>"; the
    in-memory representation (self.configList) is a list of
    [uuid, mountpoint] pairs.
    """

    def __init__(self):
        self.configList = []

    def getConfigList(self):
        """Return the current list of [uuid, mountpoint] pairs."""
        return self.configList

    def updateConfigList(self):
        """Rebuild the config list from the currently mounted /dev/sd*
        devices and persist it to disk."""
        try:
            self.configList = []
            mounts_file = open("/proc/mounts")
            try:
                mounts = mounts_file.readlines()
            finally:
                mounts_file.close()
            for x in mounts:
                if x.startswith("/dev/sd"):
                    device = x.split()[0].split('/dev/')[1]
                    mountpoint = x.split()[1]
                    # autofs mounts are transient; never persist them.
                    if mountpoint.startswith('/autofs'):
                        continue
                    (uuid, partitionType) = deviceinfo.getPartitionBlkidInfo(device)
                    if uuid != '' and mountpoint != '':
                        self.configList.append([uuid, mountpoint])
            self.saveConfig()
        except:
            print("updateConfigList failed!")

    def loadConfig(self):
        """Load uuid:mountpoint pairs from disk, creating the file if it
        does not exist yet."""
        if not fileExists(dmconfigfile):
            os.system("touch %s" % dmconfigfile)
        self.configList = []
        # Bug fix: use open() and close the handle instead of leaking a
        # py2-only file() object.
        conf = open(dmconfigfile)
        try:
            data = conf.read().split('\n')
        finally:
            conf.close()
        for line in data:
            if line.find(':') != -1:
                (uuid, mountpoint) = line.split(':')
                if uuid != '' and mountpoint != '':
                    self.configList.append([uuid, mountpoint])

    def saveConfig(self):
        """Write the current config list back to devicemanager.cfg."""
        confFile = open(dmconfigfile, 'w')
        data = ""
        for line in self.configList:
            data += "%s:%s\n" % (line[0], line[1])  # uuid, mountpoint
        confFile.write(data)
        confFile.close()

    def appendConfig(self, uuid, mountpoint):
        """Append a pair, first removing any entry with the same uuid or
        the same mountpoint."""
        # Bug fix: iterate over a copy; removing from the list while
        # iterating it skips the element following each removal.
        for x in list(self.configList):
            if x[0] == uuid or x[1] == mountpoint:
                self.configList.remove(x)
        self.configList.append([uuid, mountpoint])

    def removeConfig(self, value):
        """Remove every entry whose uuid or mountpoint equals value."""
        for x in list(self.configList):
            if x[0] == value or x[1] == value:
                self.configList.remove(x)
devicemanagerconfig = DeviceManagerConfig()
class deviceManagerHotplug:
"""Reacts to hotplug add/remove events for /dev/sd* partitions and
(un)mounts them according to the persisted uuid->mountpoint config.
Also performs bulk mount on startup and unmount on shutdown."""
# Flag allowing callers to temporarily suppress hotplug handling.
def __init__(self):
self.hotplugActive = True
def setHotplugActive(self,value=True):
if value:
self.hotplugActive = True
else:
self.hotplugActive = False
# Debug helper: dump the harddiskmanager partition list.
def printDebug(self):
for p in harddiskmanager.partitions:
print " # partition : %s %s %s %s %s(mp, des, f_mounted, is_hot, dev)"%(p.mountpoint, p.description, p.force_mounted, p.is_hotplug,p.device)
# Mount devpath at mountpoint, unmounting conflicting mounts first.
# filesystem None -> let mount(8) autodetect; "ntfs" -> use ntfs-3g.
def doMount(self, uuid, devpath, mountpoint, filesystem):
# check current device mounted on another mountpoint.
mp_list = []
mp_list = deviceinfo.checkMountDev(devpath)
for mp in mp_list:
if mp != mountpoint and path.ismount(mp):
deviceinfo.umountByMountpoint(mp)
# check another device mounted on configmountpoint
devpath_list = []
devpath_list = deviceinfo.checkMountPoint(mountpoint)
for devpath_ in devpath_list:
if devpath_ != devpath:
print "[DeviceManager] Mount Failed. (Another device is already mounted)"
return
# do mount
#		print "[DeviceManager] doMount"
if not path.exists(mountpoint):
os.system("mkdir %s"%mountpoint)
if path.exists(mountpoint):
if not path.ismount(mountpoint):
if filesystem == "ntfs":
cmd = "ntfs-3g %s %s"%(devpath, mountpoint)
elif filesystem is None:
cmd = "mount %s %s"%(devpath, mountpoint)
else:
cmd = "mount -t %s %s %s"%(filesystem, devpath, mountpoint)
print "[DeviceManager] cmd : %s"%cmd
os.system(cmd)
if not deviceinfo.isMounted(devpath, mountpoint):
print "[DeviceManager] %s doMount failed!"%devpath
return
else:
# Update partition Info, add
self.addPartitionAutofsMountpoint(devpath, mountpoint)
# Unmount every mountpoint the device is currently mounted on.
def doUmount(self, device, mountpoint):
devpath = "/dev/"+device
mountpoints = deviceinfo.checkMountDev(devpath)
if len(mountpoints) == 0:
return
for mp in mountpoints:
cmd = "umount %s"%devpath
print "[DeviceManager] cmd : %s"%cmd
os.system(cmd)
# Hotplug 'add': look up the partition's uuid in the config and mount
# it at the configured mountpoint (retries blkid once after 1s).
def addHotPlugDevice(self, partition):
device = partition.device
devpath = "/dev/"+device
# get BlkidInfo
(uuid, filesystem) = deviceinfo.getPartitionBlkidInfo(device)
if uuid == "":
# retry..
os.system("sleep 1")
(uuid, filesystem) = deviceinfo.getPartitionBlkidInfo(device)
if uuid == "":
print "[DeviceManagerHotplug] getBlkidInfo failed!"
return
# get configList
devicemanagerconfig.loadConfig()
configList = devicemanagerconfig.getConfigList()
mountpoint = None
for line in configList:
if uuid == line[0].strip():
mountpoint = line[1].strip()
break
if mountpoint is None:
return
# do mount
if deviceinfo.isMounted(devpath, mountpoint):
pass
#			print "[DeviceManagerHotplug] already mounted"
else:
self.doMount(uuid, devpath, mountpoint, filesystem)
def removeHotplugDevice(self, partition):
self.doUmount(partition.device, partition.mountpoint)
# Entry point called from harddiskmanager.on_partition_list_change.
def getHotplugAction(self, action, partition):
if not self.hotplugActive or not config.plugins.devicemanager.hotplug_enable.value:
return
if partition.device is None or not partition.device.startswith("sd"):
return
print "[DeviceManagerHotplug] action : %s, device : %s"%(action, partition.device)
if action == 'add':
self.addHotPlugDevice(partition)
elif action == 'remove':
self.removeHotplugDevice(partition)
# Register the device's autofs mountpoint with harddiskmanager so the
# rest of enigma2 sees the newly mounted partition.
def addPartitionAutofsMountpoint(self, devpath, mountpoint):
device = path.basename(devpath)
autofsMountpoint = harddiskmanager.getAutofsMountpoint(device)
# check already appended to partition list
for x in harddiskmanager.partitions:
if x.mountpoint == autofsMountpoint or x.mountpoint == mountpoint:
return
#
from Components.Harddisk import Partition
physdev = path.realpath('/sys/block/' + device[:3] + '/device')[4:]
description = harddiskmanager.getUserfriendlyDeviceName(device, physdev)
p = Partition(mountpoint = autofsMountpoint, description = description, force_mounted = True, device = device)
harddiskmanager.partitions.append(p)
harddiskmanager.on_partition_list_change("add", p)
# At startup: mount every configured uuid found by blkid.
def autoMountOnStartup(self):
devicemanagerconfig.loadConfig()
configList = devicemanagerconfig.getConfigList()
# get blkid info
blkiddata = []
data = os.popen("blkid -c /dev/NULL /dev/sd*").readlines()
for line in data:
devpath = uuid = filesystem = ""
devpath = line.split(':')[0]
if line.find(" UUID=") != -1:
uuid = line.split(" UUID=")[1].split(' ')[0]
if line.find(" TYPE=") != -1:
filesystem = line.split(" TYPE=")[1].split(' ')[0].strip('"')
blkiddata.append((devpath, uuid, filesystem))
# check configList
for c in configList:
uuid_cfg = c[0].strip()
mountpoint_cfg = c[1].strip()
for (devpath, uuid, filesystem) in blkiddata:
if uuid_cfg == uuid:
# do mount
if deviceinfo.isMounted(devpath, mountpoint_cfg):
#					print "[Devicemanager startup] already mounted"
self.addPartitionAutofsMountpoint(devpath, mountpoint_cfg)
else:
#					print "[autoMountOnStartup] do mount(%s %s %s)"%(devpath, configmountpoint, filesystem)
self.doMount(uuid, devpath, mountpoint_cfg, filesystem)
# At shutdown: unmount configured devices not auto-mounted via fstab.
def umountOnShutdown(self):
devicemanagerconfig.loadConfig()
configList = devicemanagerconfig.getConfigList()
# get mount info
mounts = []
data = file('/proc/mounts').read().split('\n')
for x in data:
if not x.startswith('/dev/sd'):
continue
devpath, mountpoint = x.split()[:2]
mounts.append((path.basename(devpath), mountpoint))
# get blkid info
data = self.getBlkidInfo()
# check configList
for c in configList:
uuid_cfg = c[0].strip()
mountpoint_cfg = c[1].strip()
device_cfg = None
if uuid_cfg in data.keys():
device_cfg = data[uuid_cfg]
if device_cfg is None:
continue
for (device, mountpoint) in mounts:
if device_cfg == device:
if not deviceinfo.isFstabAutoMounted(uuid_cfg, "/dev/"+device_cfg, mountpoint_cfg):
self.doUmount(device, mountpoint)
# Return {uuid: device-name} for all /dev/sd* partitions from blkid.
def getBlkidInfo(self):
data = {}
blkid_data = os.popen("blkid -c /dev/NULL /dev/sd*").read()
for line in blkid_data.split('\n'):
#			print "[DeviceManager] getBlkidInfo line : ",line
device = uuid = ""
device = path.basename(line.split(':')[0])
if line.find(" UUID=") != -1:
blkid_uuid = line.split(" UUID=")[1].split(' ')[0]
data[blkid_uuid] = device
return data
# Module-level singleton used by the plugin entry points below.
devicemanagerhotplug = deviceManagerHotplug()
# Adapter registered with harddiskmanager.on_partition_list_change.
def DeviceManagerhotplugDeviceStart(action, device):
devicemanagerhotplug.getHotplugAction(action, device)
# Opens the DeviceManager screen when the user confirmed the notification.
def callBackforDeviceManager(session, callback_result = False):
if callback_result == True:
session.open(DeviceManager)
# Scan /sys/block for sd* devices without a mountable (autofs-visible)
# partition and notify the user, offering to open the DeviceManager.
def checkMounts(session):
try:
noMountable_dev = ""
for blockdev in listdir("/sys/block"):
devpath = "/sys/block/" + blockdev
dev = int(readFile(devpath + "/dev").split(':')[0])
if dev in (7, 31) or blockdev[0:2] != 'sd': # 7: loop, 31 : mtdblock
continue
partitions = []
noMountable_partitions = []
for partition in listdir(devpath):
if not partition.startswith(blockdev):
continue
partitions.append(partition)
if os.access('/autofs/'+partition,0) is False:
noMountable_partitions.append(partition)
if len(partitions) == 0 or len(noMountable_partitions) != 0:
if noMountable_dev != "":
noMountable_dev += ' '
noMountable_dev += blockdev
if noMountable_dev != "":
print "Umountable partitions found."
InfoText = _("No mountable devices found.! (%s)\nDo you want to open DeviceManager and do initialize or format this device?\n\n(Open 'Menu->Setup->System -> Harddisk -> DeviceManager'\n and press MENU button to deactivate this check.)")%noMountable_dev
AddNotificationWithCallback(
boundFunction(callBackforDeviceManager, session),
MessageBox, InfoText, timeout = 60, default = False
)
except:
print "checkMounts failed!"
# WHERE_SESSIONSTART hook: reason 0 = session start, 1 = session end.
def sessionstart(reason, **kwargs):
if reason == 0:
if kwargs.has_key("session") and config.plugins.devicemanager.mountcheck_enable.value == True:
session = kwargs["session"]
checkMounts(session)
if config.plugins.devicemanager.hotplug_enable.value:
harddiskmanager.on_partition_list_change.append(DeviceManagerhotplugDeviceStart)
elif reason == 1:
if config.plugins.devicemanager.hotplug_enable.value:
harddiskmanager.on_partition_list_change.remove(DeviceManagerhotplugDeviceStart)
# WHERE_AUTOSTART hook: on first boot seed devicemanager.cfg with sda1,
# then auto-mount; on shutdown unmount configured devices.
def autostart(reason, **kwargs):
if reason == 0:
try:
# check at first enigma2 start
if not fileExists(dmconfigfile):
print "[DeviceManager] autostart : check devices at first start"
sda_isremovable = False
sda_UUID = ""
os.system("touch %s"%dmconfigfile)
# check sda
sda_data = popen("cat /proc/partitions | grep sda1").read()
if sda_data != '':
sda_UUID = popen("blkid -o value -s UUID /dev/sda1").read().strip('\n')
sda_isremovable = bool(int(readFile("/sys/block/sda/removable")))
print "sda : %s, %s"%(sda_UUID, sda_isremovable)
cfg = ""
if sda_data != '':
# NOTE(review): the uuid is written wrapped in double quotes but
# DeviceManagerConfig.loadConfig does not strip quotes, so the
# startup lookup may never match this entry -- verify intended.
cfg += '"%s":/media/hdd\n'%sda_UUID
confFile = open(dmconfigfile,'w')
confFile.write(cfg)
confFile.close()
if not path.exists("/media/hdd"):
os.system("mkdir -p /media/hdd")
# auto mount
devicemanagerhotplug.autoMountOnStartup()
except:
print "[DeviceManager] autostart failed!"
elif reason == 1:
devicemanagerhotplug.umountOnShutdown()
# Menu hook: add an entry under Setup -> System.
def menu(menuid, **kwargs):
if menuid == "system":
return [(_("DeviceManager"), main, "device_manager", 50)]
return []
def main(session, **kwargs):
session.open(DeviceManager)
# Plugin descriptor list consumed by the enigma2 plugin loader.
def Plugins(path, **kwargs):
return [
PluginDescriptor(name = _("DeviceManager"), description = _("manage block devices of your VU+"), where = PluginDescriptor.WHERE_MENU,fnc=menu),
PluginDescriptor(where = PluginDescriptor.WHERE_SESSIONSTART, needsRestart = True, fnc = sessionstart),
PluginDescriptor(where = PluginDescriptor.WHERE_AUTOSTART, needsRestart = True, fnc = autostart)
]
# MessageBox that can be closed programmatically: run_close() stores the
# result and fires a 100ms eTimer so the close happens outside the caller.
class MessageBox_2(MessageBox):
def __init__(self, session, text, type = MessageBox.TYPE_YESNO, timeout = -1, close_on_any_key = False, default = True, enable_input = True, msgBoxID = None):
MessageBox.__init__(self, session, text, type, timeout, close_on_any_key, default, enable_input, msgBoxID)
self.skinName = "MessageBox"
self.closeTimer = eTimer()
self.closeTimer.callback.append(self.msg_close)
self.devicemanager_ret = False
self.devicemanager_msg = ""
# Timer callback: close with the stored (ret, msg) pair.
def msg_close(self):
self.close(self.devicemanager_ret, self.devicemanager_msg)
# Request a deferred close returning (ret, msg) to the callback.
def run_close(self, ret, msg=""):
self.devicemanager_ret = ret
self.devicemanager_msg = msg
self.closeTimer.start(100,True)
def createSummary(self):
return MessageBox_2_Summary
# LCD/front-display summary screen for MessageBox_2.
class MessageBox_2_Summary(Screen):
skin="""
	<screen name="MessageBox_2_Summary" position="0,0" size="256,64" id="1">
		<widget source="parent.Text" render="Label" position="0,0" size="256,64" font="Regular;13" halign="center" valign="center" />
	</screen>
	"""
| gpl-2.0 | -7,477,652,780,564,643,000 | 36.388979 | 262 | 0.690985 | false | 3.177111 | true | false | false |
rvykydal/blivet | blivet/devicelibs/disk.py | 1 | 3521 | from collections import namedtuple
from . import raid
from .. import errors
from .. import util
from ..size import Size
lsm = None
_HBA_PLUGIN_URIS = ("hpsa://", "megaraid://")
LSMInfo = namedtuple('HBAVolumeInfo', ['system', 'nodes', 'raid_type', 'raid_stripe_size', 'raid_disk_count'])
""" .. class:: LSMInfo
.. attribute:: system (str): descriptive name of HBA unit
.. attribute:: nodes (list[str]): list of device node paths for the volume
.. attribute:: raid_type (:class:`~.devicelibs.raid.RAIDLevel` or None): RAID level
.. attribute:: raid_stripe_size (:class:`~.size.Size` or None): stripe size
.. attribute:: raid_disk_count (int or None): number of disks in the RAID set
"""
volumes = dict()
_raid_levels = dict()
class _LSMRAIDLevelStub(raid.RAIDLevel):
    """Placeholder RAID level for lsm-reported types blivet does not model."""

    def __init__(self, name):
        self._level_name = name

    @property
    def name(self):
        return self._level_name

    @property
    def names(self):
        return [self.name]

    @property
    def min_members(self):
        return 0

    def has_redundancy(self):
        return False

    def is_uniform(self):
        return False
# Lazily imports the optional libstoragemgmt python bindings; the module
# global 'lsm' stays None when they are unavailable.
class _LSMDependencyGuard(util.DependencyGuard):
error_msg = "libstoragemgmt functionality not available"
def _check_avail(self):
# Attempt the import only once; afterwards 'lsm' is either the
# module or None.
global lsm
if lsm is None: # pylint: disable=used-before-assignment
try:
import lsm # pylint: disable=redefined-outer-name
except ImportError:
lsm = None
return lsm is not None
_lsm_required = _LSMDependencyGuard()
def _update_lsm_raid_levels():
""" Build a mapping of lsm.RAID_TYPE->blivet.devicelibs.raid.RAIDLevel """
global _raid_levels
_raid_levels = dict()
# lsm exposes its RAID types as RAID_TYPE_* class constants on Volume.
lsm_raid_levels = dict((k, v) for (k, v) in lsm.Volume.__dict__.items() if k.startswith("RAID_TYPE_"))
for constant_name, value in lsm_raid_levels.items():
name = constant_name[len("RAID_TYPE_"):]
try:
level = raid.get_raid_level(name)
except errors.RaidError:
# Unknown to blivet -- represent it with a stub carrying the name.
level = _LSMRAIDLevelStub(name)
_raid_levels[value] = level
def _get_lsm_raid_level(lsm_raid_type):
""" Return a blivet.devicelibs.raid.RAIDLevel corresponding the lsm-reported RAID level."""
return _raid_levels.get(lsm_raid_type, _raid_levels.get(lsm.Volume.RAID_TYPE_UNKNOWN))
@_lsm_required(critical=False, eval_mode=util.EvalMode.always)
def update_volume_info():
""" Build a dict of namedtuples containing basic HBA RAID info w/ device path keys. """
global volumes
volumes = dict()
_update_lsm_raid_levels()
for uri in _HBA_PLUGIN_URIS:
try:
client = lsm.Client(uri)
except lsm.LsmError:
# This HBA plugin is not present/usable; try the next one.
continue
systems = dict((s.id, s) for s in client.systems())
for vol in client.volumes():
# Resolve the volume's device node(s) via its VPD 0x83 identifier.
nodes = lsm.LocalDisk.vpd83_search(vol.vpd83)
system = systems[vol.system_id]
caps = client.capabilities(system)
raid_level = None
stripe_size = None
disk_count = None
if caps.supported(lsm.Capabilities.VOLUME_RAID_INFO):
raid_info = client.volume_raid_info(vol)[:3]
raid_level = _get_lsm_raid_level(raid_info[0])
stripe_size = Size(raid_info[1])
disk_count = raid_info[2]
info = LSMInfo(system.name, nodes, raid_level, stripe_size, disk_count)
# Every node path of the volume maps to the same info record.
volumes.update([(node, info) for node in nodes])
| lgpl-2.1 | -8,452,506,432,082,823,000 | 30.4375 | 110 | 0.613178 | false | 3.503483 | false | false | false |
zigapk/adventofcode | 2020/day_22/two.py | 1 | 1428 | from copy import deepcopy
# Parse the puzzle input file 'in' into the two starting decks.  The
# int() failure on the "Player 2:" header (or blank line) flips the
# player_2 flag so subsequent cards go to the second deck.
d1 = []
d2 = []
player_2 = False
with open('in', 'r') as f:
f.readline()
for line in f.readlines():
try:
i = int(line.strip())
if player_2:
d2.append(i)
else:
d1.append(i)
except Exception:
player_2 = True
# Shared state for the recursive game: previously seen round states,
# a counter for numbering sub-games, and the final decks of the last
# finished game (used for scoring).
seen = set()
next_game_id = 1
last_deck_1 = None
last_deck_2 = None
# Play one game of Recursive Combat (AoC 2020 day 22 part 2).
# Returns 1 if player 1 wins, 2 if player 2 wins.  Uses the module
# globals: 'seen' (repetition detection; keyed by game_id so the check
# is per-recursive-game as the puzzle requires), 'next_game_id' and the
# last_deck_* pair that the scoring code reads afterwards.
def game(deck1, deck2, game_id=0):
global next_game_id, last_deck_1, last_deck_2
while len(deck1) > 0 and len(deck2) > 0:
# Repetition of an earlier round state means player 1 wins the game.
h = (str(game_id), str(deck1), str(deck2))
if h in seen:
return 1
seen.add(h)
card1, deck1 = deck1[0], deck1[1:]
card2, deck2 = deck2[0], deck2[1:]
if len(deck1) >= card1 and len(deck2) >= card2:
# Both players have enough cards: recurse into a sub-game played
# with copies of the top card1/card2 cards.
next_game_id += 1
player_1_wins = game(deepcopy(deck1[:card1]), deepcopy(deck2[:card2]), game_id=next_game_id - 1) == 1
else:
player_1_wins = card1 > card2
if player_1_wins:
deck1 = deepcopy(deck1) + [card1, card2]
else:
deck2 = deepcopy(deck2) + [card2, card1]
last_deck_1 = deck1
last_deck_2 = deck2
return 1 if len(deck2) == 0 else 2
game(deepcopy(d1), deepcopy(d2))
res = 0
# The winner's deck is whichever final deck is non-empty.
d = last_deck_1 if len(last_deck_2) == 0 else last_deck_2
d = list(reversed(d))
# Score: bottom card counts once, the next twice, and so on.
for i in range(1, len(d) + 1):
res += i * d[i - 1]
print(res)
| mit | -2,103,628,178,939,617,000 | 20.969231 | 113 | 0.523109 | false | 2.811024 | false | false | false |
artemp/MapQuest-Render-Stack | storage/node/storage_node_pylons/storage_node_pylons/lib/mqExpiry.py | 1 | 5688 | import os
import socket
import struct
import logging
import sys
from threading import Lock
# maximum z that's possible within a 64-bit unsigned
# number, which is what we transmit across the network
MAX_Z = 35
# size of a metatile
METATILE = 8
log = logging.getLogger(__name__)
class ZLevels():
    """Maps (x, y, z) metatile coordinates to indices in one linear array.

    Within each zoom level metatiles are ordered by their Morton code
    (interleaved x/y bits), and offsets[z] holds the index of the first
    metatile of level z, so the whole pyramid fits in a single file or
    flat array without needing one file per zoom level.
    """

    def __init__(self, max_z=None):
        """Build the per-level offset table.

        max_z -- number of zoom levels; defaults to the module-wide MAX_Z.
        Making it a parameter keeps backward compatibility while allowing
        smaller pyramids (and isolated testing).
        """
        if max_z is None:
            max_z = MAX_Z
        # Metatiles per zoom level: one metatile covers an 8x8 tile block,
        # so levels 0-3 have a single metatile, then 4**(z-3) afterwards.
        # (List comprehension replaces the py2-only map/reduce pair.)
        z_sizes = [4 ** max(0, z - 3) for z in range(0, max_z)]
        self.offsets = [0]
        for size in z_sizes:
            self.offsets.append(self.offsets[-1] + size)

    def size_of(self, max_z):
        """Return the number of bytes needed for one bit per metatile over
        all zoom levels up to and including max_z."""
        bit_size = self.offsets[max_z + 1]
        # Round up to whole bytes.  // keeps integer semantics on both
        # Python 2 and 3 (plain / would yield a float on py3).
        return bit_size // 8 + (1 if bit_size % 8 > 0 else 0)

    def tile_to_meta_idx(self, x, y, z):
        """Return the linear index of the metatile at metatile coordinates
        (x, y) on zoom level z."""
        morton_code = (self._interleave(x) << 1) | self._interleave(y)
        return self.offsets[z] + morton_code

    def _interleave(self, n):
        """Bit interleaving function, i.e: turns 11011 into
        1010001010 and is used to create Morton codes. This
        extended version is able to handle up to 2^32-1.
        """
        n &= 0xffffffff
        n = (n | (n << 16)) & 0x0000FFFF0000FFFF
        n = (n | (n << 8)) & 0x00FF00FF00FF00FF
        n = (n | (n << 4)) & 0x0F0F0F0F0F0F0F0F
        n = (n | (n << 2)) & 0x3333333333333333
        n = (n | (n << 1)) & 0x5555555555555555
        return n

    def _uninterleave(self, n):
        """Inverse of bit interleaving function, able to
        handle 64-bit numbers allowing outputs to be up to
        2^32-1 in size."""
        n &= 0x5555555555555555
        n = (n ^ (n >> 1)) & 0x3333333333333333
        n = (n ^ (n >> 2)) & 0x0f0f0f0f0f0f0f0f
        n = (n ^ (n >> 4)) & 0x00ff00ff00ff00ff
        n = (n ^ (n >> 8)) & 0x0000ffff0000ffff
        n = (n ^ (n >> 16)) & 0xffffffff
        return n
class mqExpiryInfo:
"""Client class for expiry information. Contacts a server
over UDP to get metatile expiry information.
"""
def __init__(self, host, port):
# set up the socket for the first time
self.sock = None
self._socket_init()
family, socktype, proto, canonname, sockaddr = socket.getaddrinfo(host, port, socket.AF_INET)[0]
self.sockaddr = sockaddr
# offsets structure for turning metatile locations into
# raw 64-bit integers.
self.zlevels = ZLevels()
# a lock, so we can ensure thread-local access to the
# socket.
self.mutex = Lock()
def _socket_init(self):
"""Initialise (or re-initialise) the socket. Note that
unless you're in the constructor, you'll need to be holding
the mutex when you call this method.
"""
# shutdown the socket if it already exists
if self.sock is not None:
log.info("Re-opening socket. Old socket is %s [%s]" % (str(self.sock), str(self.sock.fileno())))
try:
self.sock.shutdown(socket.SHUT_RDWR)
except:
# this fails if the socket isn't considered connected, which
# is annoying but perhaps ignorable?
pass
try:
self.sock.close()
except:
# this may also throw an error if the close fails. but we're
# going to open a new socket anyway, so maybe ignorable?
pass
# get rid of reference to old socket, will be garbage collected.
self.sock = None
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
log.info("Opening socket. Socket is %s [%s]" % (str(self.sock), str(self.sock.fileno())))
# set up so that sockets timeout after 0.2s - should
# be enough on localhost to receive a reply.
self.sock.settimeout(0.2)
def get_tile(self, x, y, z, style):
"""Gets information about expiry from the server. Returns
true if the metatile is expired, false otherwise and None
if an error occurred."""
# Convert tile coordinates to metatile coordinates before lookup.
idx = self.zlevels.tile_to_meta_idx(x / METATILE, y / METATILE, z)
rep = self._get_bit(idx, str(style))
if (rep == 'ERR') or (rep is None):
return None
else:
# Reply is a single unsigned byte holding the expiry bit.
bit = struct.unpack('@B', rep)[0]
return bit != 0
def set_tile(self, x, y, z, style, val):
"""Sends information about the expiry to the server.
Returns true if the request succeeded and false otherwise.
"""
idx = self.zlevels.tile_to_meta_idx(x / METATILE, y / METATILE, z)
bit = 0
if val == True:
bit = 1
ret = self._set_bit(idx, bit, str(style))
return ret == 'OK'
def _basic_req(self, msg):
# Single request/reply exchange, serialised through the mutex so
# replies cannot be interleaved between threads.
reply = None
with self.mutex:
try:
self.sock.sendto(msg, self.sockaddr)
reply = self.sock.recv(4096)
except:
# if this times out, return none so that other code
# can handle the error via fall-backs. reset the socket
# so that no odd trailing packets will be received and
# misinterpreted.
self._socket_init()
log.error("Error talking to expiry info server: %s" % str(sys.exc_info()))
reply = None
return reply
# Wire format: 64-bit index, bit value, op code ('G'et/'S'et), style name.
def _get_bit(self, idx, style):
return self._basic_req(struct.pack('!Qbc255p', idx, 0, 'G', style))
def _set_bit(self, idx, bit, style):
return self._basic_req(struct.pack('!Qbc255p', idx, bit, 'S', style))
| lgpl-2.1 | 112,859,821,221,572,530 | 35.461538 | 108 | 0.568038 | false | 3.636829 | false | false | false |
funkybob/rattle | rattle/template.py | 1 | 4791 | import ast
import os
from .lexer import lexers
from .parser import parsers
from .utils.astpp import dump as ast_dump
from .utils.parser import ParserState, build_call, build_str_join
AST_DEBUG = os.environ.get('RATTLE_AST_DEBUG', False)
SHOW_CODE = os.environ.get('RATTLE_SHOW_CODE', False)
# Raised when template source cannot be parsed.
class TemplateSyntaxError(Exception):
pass
class SafeData(str):
    """String subclass marking text that is already HTML-safe and must
    not be escaped again."""
    pass


def escape(text):
    """HTML-escape *text* and return it wrapped in SafeData.

    Ampersands, angle brackets and both quote characters are encoded.
    Values already marked as SafeData pass through untouched; non-string
    values are converted with str() first.
    """
    if isinstance(text, SafeData):
        return text
    if not isinstance(text, str):
        text = str(text)
    # Replace '&' first so entities introduced below are not re-escaped.
    for raw, entity in (('&', '&amp;'),
                        ('<', '&lt;'),
                        ('>', '&gt;'),
                        ('"', '&quot;'),
                        ("'", '&#39;')):
        text = text.replace(raw, entity)
    return SafeData(text)


def auto_escape(s):
    """Escape *s* unless it is already marked as safe."""
    return s if isinstance(s, SafeData) else escape(s)
class Library(object):
    """Registry of template filters and tags.

    Callables register under their short name (func.__name__).  When
    callables from different modules share a short name, the short name
    becomes ambiguous: it is removed from the registry and each callable
    is reachable only under its fully qualified "module.name" key.
    """

    def __init__(self):
        self._filters = {}
        self._tags = {}
        self._ambiguous_filters = set()
        self._ambiguous_tags = set()

    @property
    def filters(self):
        return self._filters

    @property
    def tags(self):
        return self._tags

    @staticmethod
    def _full_name(func):
        """Return the fully qualified "module.name" key for *func*."""
        return '%s.%s' % (func.__module__, func.__name__)

    def _register(self, registry, ambiguous, func):
        """Shared implementation behind register_filter/register_tag."""
        name = func.__name__
        full_name = self._full_name(func)
        if name in ambiguous:
            # Bug fix: a third (or later) registration under an already
            # ambiguous short name was previously dropped entirely; store
            # it under its full name instead.
            registry[full_name] = func
        elif name not in registry:
            registry[name] = func
        elif full_name not in registry:
            # First collision for this short name: demote both callables
            # to their fully qualified names.  Bug fix: the earlier
            # callable was previously deleted and became unreachable.
            ambiguous.add(name)
            old = registry.pop(name)
            registry[self._full_name(old)] = old
            registry[full_name] = func

    def _unregister(self, registry, full_name):
        """Shared implementation behind unregister_filter/unregister_tag."""
        registry.pop(full_name, None)
        _, _, short_name = full_name.rpartition('.')
        registry.pop(short_name, None)

    def register_filter(self, func):
        """Register *func* as a filter.  Usable as a decorator."""
        self._register(self._filters, self._ambiguous_filters, func)
        return func

    def unregister_filter(self, full_name):
        """Remove the filter registered under *full_name* (and its short name)."""
        self._unregister(self._filters, full_name)

    def register_tag(self, func):
        """Register *func* as a tag.  Usable as a decorator."""
        self._register(self._tags, self._ambiguous_tags, func)
        return func

    def unregister_tag(self, full_name):
        """Remove the tag registered under *full_name* (and its short name)."""
        self._unregister(self._tags, full_name)
library = Library()
class Template(object):
"""Compiles template source into a Python code object whose execution
renders the template into the module-level 'rendered' global."""
def __init__(self, source, origin=None):
self.source = source
self.origin = origin
# A list of compiled tags
self.compiled_tags = []
code = self.parse()
ast.fix_missing_locations(code)
# Optional debugging output controlled by environment variables.
if AST_DEBUG:
print(ast_dump(code))
if SHOW_CODE:
try:
import codegen
print(codegen.to_source(code))
except ImportError:
pass
self.func = compile(code, filename="<template>", mode="exec")
# Names always available inside templates.
self.default_context = {
'True': True,
'False': False,
'None': None,
}
def parse(self):
"""
Convert the parsed tokens into a list of expressions then join them
"""
tokens = lexers.sl.lex(self.source)
state = ParserState()
# The parser emits a class definition named 'Template' whose root()
# method yields the rendered fragments.
klass = parsers.sp.parse(tokens, state)
# Module body: the generated class, then
#   rendered = ''.join(Template().root(context))
body = [
klass,
ast.Global(names=['rendered']),
ast.Assign(
targets=[ast.Name(id='rendered', ctx=ast.Store())],
value=build_str_join(
build_call(
ast.Attribute(
value=build_call(
ast.Name(id='Template', ctx=ast.Load())
),
attr='root',
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load())
],
)
)
)
]
return ast.Module(
body=body
)
def render(self, context={}):
"""Execute the compiled template with *context* and return the
rendered string."""
# NOTE(review): the mutable default argument is safe here because the
# dict is only copied, never mutated.
ctx = context.copy()
ctx.update(self.default_context)
# Globals visible to the generated code.
global_ctx = {
'context': ctx,
'compiled_tags': self.compiled_tags,
'filters': library.filters,
'auto_escape': auto_escape,
'output': [],
'rendered': None
}
local_ctx = {
}
eval(self.func, global_ctx, local_ctx)
return global_ctx['rendered']
| mit | 6,986,172,143,143,783,000 | 26.377143 | 77 | 0.507201 | false | 4.210018 | false | false | false |
chugle/myapp | applications/welcome/controllers/teacher.py | 1 | 6090 | # coding: utf8
# 尝试
import datetime
now=datetime.date.today()
year=now.year
month=now.month
if int(month) in range(2,8):
xueqi=2
xuenian=str(int(year)-1-2000)
else:
xueqi=1
xuenian=str(int(year)-2000)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def index(): return dict()
# CRUD admin pages: one grid/smartgrid per database table, all restricted
# to localhost or users with the 'teacher' role.
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def student_manage():
form = SQLFORM.grid(db.auth_user)
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def course_manage():
form = SQLFORM.smartgrid(db.course)
return dict(form=form)
# Lesson grid restricted to the current academic year/semester, with
# extra links to the grading (pigai) and add-exercise (addwenti) pages.
# (The link labels are Chinese for "grade homework" / "add exercises".)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def keshi_manage():
form = SQLFORM.smartgrid(db.keshi,
constraints={'keshi':(db.keshi.xuenian==xuenian)&(db.keshi.xueqi==xueqi)},
links=[dict(header='',body=lambda row:A('批改作业',_href=URL('pigai',args=row.id))),
dict(header='',body=lambda row:A('添加练习',_href=URL('addwenti',args=row.id)))])
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def timu_manage():
form = SQLFORM.smartgrid(db.timu)
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def lianxi_manage():
form = SQLFORM.smartgrid(db.lianxi)
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def zuoti_manage():
form = SQLFORM.smartgrid(db.zuoti)
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def zuoye_manage():
form = SQLFORM.smartgrid(db.zuoye)
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def defen_manage():
form = SQLFORM.smartgrid(db.defen)
return dict(form=form)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def wangpan_manage():
form = SQLFORM.smartgrid(db.wangpan)
return dict(form=form)
# Grade homework ("pigai"): show the first ungraded submission for the
# lesson given in request.args[0]; after grading, reload to fetch the
# next one.  The fallback message means "No homework left to grade".
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def pigai():
keshi_id=request.args[0]
row=db((db.zuoye.keshi==keshi_id)&(db.zuoye.defen==None)).select().first()
if row:
form=SQLFORM(db.zuoye,row.id,upload=URL('download'))
vals=None
if form.process().accepted:
vals=response.url
redirect(request.url)
return dict(form=form,vals=vals)
else:
return dict(error=H3('没有可以批改的作业'))
# Grade overview pages: list this semester's lessons for second-year
# (grade2) and first-year (grade1) students.  The default class (banji)
# comes from the author of the most recent homework submission unless a
# class is passed in request.args[0].
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def grade2():
lastxs=db().select(db.zuoye.zuozhe).last().zuozhe
banji=db.auth_user[lastxs].banji
if len(request.args):
banji=request.args[0]
rows=db((db.keshi.xuenian==xuenian)&(db.keshi.xueqi==xueqi)&(db.keshi.nianji==2)).select(left=db.keshi.on(db.keshi.kecheng==db.course.id))
return dict(rows=rows,banji=banji)
@auth.requires(request.client=='127.0.0.1' or auth.has_membership(role='teacher') , requires_login=False)
def grade1():
lastxs=db().select(db.zuoye.zuozhe).last().zuozhe
banji=db.auth_user[lastxs].banji
if len(request.args):
banji=request.args[0]
rows=db((db.keshi.xuenian==xuenian)&(db.keshi.xueqi==xueqi)&(db.keshi.nianji==1)).select(left=db.keshi.on(db.keshi.kecheng==db.course.id))
return dict(rows=rows,banji=banji)
# Homework listings for a lesson (request.args[0]) and class
# (request.args[1]): left-join students of the relevant intake year
# (jie) with their submissions so students without homework still
# appear.  The two variants differ only in the intake-year offset for
# second-year vs first-year students.
def homeworks2():
keshi_id=request.args[0]
if month in range(2,8):
jie2=int(year)+1-2000
else:
jie2=int(year)+2-2000
banji=request.args[1]
rows=db((db.auth_user.banji==banji)&
(db.auth_user.jie==jie2)).select(
db.auth_user.last_name,
db.auth_user.first_name,
db.zuoye.ALL,left=db.zuoye.on((db.auth_user.id==db.zuoye.zuozhe)&(db.zuoye.keshi==keshi_id)),
orderby=db.auth_user.last_name)
return dict(rows=rows,banji=banji)
def homeworks1():
keshi_id=request.args[0]
if month in range(2,8):
jie1=int(year)+2-2000
else:
jie1=int(year)+3-2000
banji=request.args[1]
rows=db((db.auth_user.banji==banji)&
(db.auth_user.jie==jie1)).select(
db.auth_user.last_name,
db.auth_user.first_name,
db.zuoye.ALL,left=db.zuoye.on((db.auth_user.id==db.zuoye.zuozhe)&(db.zuoye.keshi==keshi_id)),
orderby=db.auth_user.last_name)
return dict(rows=rows,banji=banji)
# Attach exercises ("lianxi") to a lesson: for every question (timu)
# belonging to the lesson's course, upsert a lianxi row linking it to
# the lesson given in request.args[0].
def addwenti():
keshi_id=request.args[0]
keshi=db.keshi[keshi_id]
course=keshi.kecheng
timus=db(db.timu.course==course).select()
for timu in timus:
db.lianxi.update_or_insert(
(db.lianxi.keshi==keshi_id)&
(db.lianxi.timu==timu),
keshi=keshi_id,
timu=timu
)
lianxis=db(db.lianxi.keshi==keshi_id).select()
return dict(timus=timus,lianxis=lianxis)
'''
grade1 and grade2 link to each course's study module; a study module
contains exercise walkthroughs and homework statistics (submission
counts and score statistics).
Use ajax for the statistics and for revealing answers in the
walkthrough section.
'''
# Serves uploaded files stored via web2py's upload fields.
def download():
"""
allows downloading of uploaded files
http://..../[app]/default/download/[filename]
"""
return response.download(request, db)
aholkner/MonthlyVisitor | MonthlyVisitor.py | 1 | 72282 | import logging
logging.basicConfig()
from math import floor, sqrt
import os
import sys
import collections
import itertools
import random
# For profiling: import sys; sys.path.insert(0, '../bacon')
import bacon
import tiled
import spriter
import moon
from common import Rect, tween, update_tweens
GAME_WIDTH = 800
GAME_HEIGHT = 500
# See Game.on_key for cheats
ENABLE_CHEATS = False
try:
if sys.frozen:
ENABLE_CHEATS = False
except AttributeError:
pass
bacon.window.title = 'Monthly Visitor'
bacon.window.width = GAME_WIDTH
bacon.window.height = GAME_HEIGHT
bacon.window.resizable = False
bacon.window.content_scale = 1.0
font_ui = bacon.Font(None, 16)
image_cache = {}
def load_image(name):
    """Return a bacon.Image for *name*, loading it from 'res/' on first
    use and caching it in the module-level image_cache.

    If *name* is already a bacon.Image it is returned unchanged.
    """
    if isinstance(name, bacon.Image):
        return name
    cached = image_cache.get(name)
    if cached is None:
        cached = bacon.Image('res/' + name)
        image_cache[name] = cached
    return cached
sound_monster = bacon.Sound('res/sound/monster.ogg')
sound_roar = bacon.Sound('res/sound/roar.ogg')
sound_agony1 = bacon.Sound('res/sound/agony1.ogg')
sound_agony2 = bacon.Sound('res/sound/agony2.ogg')
sound_footsteps1 = bacon.Sound('res/sound/footsteps1.ogg')
sound_footsteps2 = bacon.Sound('res/sound/footsteps2.ogg')
sound_crunch1 = bacon.Sound('res/sound/crunch1.ogg')
sound_pickup = bacon.Sound('res/sound/pickup.ogg')
sound_drop = bacon.Sound('res/sound/drop.ogg')
sound_click = bacon.Sound('res/sound/click.ogg')
sound_growl1 = bacon.Sound('res/sound/growl1.ogg')
sound_craft1 = bacon.Sound('res/sound/craft1.ogg')
sound_eat = bacon.Sound('res/sound/eat.ogg')
sound_chime = bacon.Sound('res/sound/chime.ogg')
sound_dawn = bacon.Sound('res/sound/dawn.ogg')
sound_scream = bacon.Sound('res/sound/scream.ogg')
sound_attackfence1 = bacon.Sound('res/sound/attackfence1.ogg')
sound_destroyfence1 = bacon.Sound('res/sound/destroyfence1.ogg')
sound_cow = bacon.Sound('res/sound/cow.ogg')
sound_chicken = bacon.Sound('res/sound/chicken.ogg')
sound_sheep = bacon.Sound('res/sound/sheep.ogg')
class SpriteSheet(object):
    """Slices an image into a grid of equally sized cell regions.

    After construction ``cells[row][col]`` holds the sub-image at that
    grid position.
    """
    def __init__(self, image, cols, rows):
        image = load_image(image)
        cell_width = int(image.width / cols)
        cell_height = int(image.height / rows)
        self.cells = [
            [image.get_region(col * cell_width,
                              row * cell_height,
                              (col + 1) * cell_width,
                              (row + 1) * cell_height)
             for col in range(cols)]
            for row in range(rows)]
def lpc_anims(image, cols, rows):
    """Build directional idle/walk anims from an LPC-style sprite sheet.

    The sheet has one row per facing, in the order up, left, down, right.
    The first cell of each row is the idle pose.  On wide sheets (more
    than 4 columns) the walk cycle excludes that idle frame; on narrow
    sheets the whole row is used as the walk cycle.

    Returns a dict keyed ``'idle_<facing>'`` / ``'walk_<facing>'``.
    """
    sheet = SpriteSheet(image, cols, rows)

    def make_anim(images):
        # Pivot at bottom-centre, 4px above the image's bottom edge.
        anim = Anim([Frame(image, image.width / 2, image.height - 4) for image in images])
        anim.time_per_frame = 0.1
        return anim

    # The original had two near-identical branches on cols > 4; the only
    # difference is whether the walk cycle includes the idle frame.
    anims = {}
    for row, facing in zip(sheet.cells, ('up', 'left', 'down', 'right')):
        anims['idle_' + facing] = make_anim(row[:1])
        anims['walk_' + facing] = make_anim(row[1:] if cols > 4 else row)
    return anims
def spritesheet_anim(image, cols, rows, pivot_x, pivot_y):
    """Flatten every cell of a sprite sheet, row-major, into a single Anim."""
    sheet = SpriteSheet(image, cols, rows)
    frames = [Frame(cell, pivot_x, pivot_y)
              for cell in itertools.chain.from_iterable(sheet.cells)]
    return Anim(frames)
def load_clothing_anims(name):
    """Load the directional anims plus the death anim for one clothing layer."""
    prefix = 'Clothing-' + name
    anims = lpc_anims(prefix + '.png', 9, 4)
    anims['death'] = spritesheet_anim(prefix + '-Death.png', 6, 1, 32, 54)
    return anims
class Frame(object):
    """A single animation frame: an image plus its pivot (anchor) point.

    The pivot is in image pixels; sprites are drawn with the pivot placed
    at the sprite's world position.
    """
    def __init__(self, image, pivot_x, pivot_y):
        self.image = load_image(image)
        self.pivot_x = pivot_x
        self.pivot_y = pivot_y
class Anim(object):
    """An ordered list of Frames played at a fixed per-frame rate."""
    # Seconds each frame is displayed before advancing.
    time_per_frame = 0.5
    def __init__(self, frames):
        self.frames = frames
clothing_anims = dict(
Body = load_clothing_anims('Body'),
BrownHat = load_clothing_anims('BrownHat'),
BrownShirt = load_clothing_anims('BrownShirt'),
BrownShoes = load_clothing_anims('BrownShoes'),
BrownSkirt = load_clothing_anims('BrownSkirt'),
ChainHood = load_clothing_anims('ChainHood'),
ChainTorso = load_clothing_anims('ChainTorso'),
GreenPants = load_clothing_anims('GreenPants'),
HairBlonde = load_clothing_anims('HairBlonde'),
Hood = load_clothing_anims('Hood'),
MetalBoots = load_clothing_anims('MetalBoots'),
MetalHat = load_clothing_anims('MetalHat'),
MetalPants = load_clothing_anims('MetalPants'),
PurpleJacket = load_clothing_anims('PurpleJacket'),
WhiteShirt = load_clothing_anims('WhiteShirt'),
Wolf = load_clothing_anims('Wolf'),
)
default_player_clothing = ['BrownShoes', 'GreenPants', 'WhiteShirt', 'HairBlonde']
naked_player_clothing = ['HairBlonde']
chicken_anims = lpc_anims('Chicken.png', 4, 4)
sheep_anims = lpc_anims('Sheep.png', 4, 4)
cow_anims = lpc_anims('Cow.png', 4, 4)
def distance(a, b):
    """Return the Euclidean distance between two objects with x/y attributes."""
    # hypot is numerically robust (no overflow/underflow of dx*dx + dy*dy)
    # and equals sqrt(dx*dx + dy*dy).
    return hypot(a.x - b.x, a.y - b.y)
def dot(ax, ay, bx, by):
    """2-D dot product of vectors (ax, ay) and (bx, by)."""
    products = (ax * bx, ay * by)
    return sum(products)
class Waypoint(object):
    """A map position used as a roaming/retreat target.

    See Character.walk_to_waypoint, which sorts waypoints by distance and
    can filter them by ``index``.
    """
    # Group index used to select specific waypoints in walk_to_waypoint;
    # presumably assigned from map data — TODO confirm.
    index = 0
    def __init__(self, x, y):
        self.x = x
        self.y = y
class Sprite(object):
    """A drawable, animated object positioned in the world.

    ``x``/``y`` is the world position of the current frame's pivot point.
    Setting ``time`` advances the animation; ``draw`` renders the current
    frame and then steps ``time`` by one engine timestep.
    """
    # When False the animation clamps on its last frame and fires
    # on_anim_finished once instead of wrapping around.
    looping = True
    def __init__(self, anim, x, y):
        self.anim = anim
        self.frame_index = 0
        self.frame = anim.frames[0]
        self._time = 0
        self.x = x
        self.y = y
    def __lt__(self, other):
        # Sorting sprites by y gives back-to-front draw order.
        return self.y < other.y
    def get_time(self):
        return self._time
    def set_time(self, time):
        old_time = self._time
        self._time = time
        frame_index = int(time / self.anim.time_per_frame)
        if self.looping:
            self.frame_index = frame_index % len(self.anim.frames)
            self.frame = self.anim.frames[self.frame_index]
        else:
            # Fire the completion callback exactly once, on the update that
            # first steps past the final frame.
            if self.frame_index < len(self.anim.frames) and frame_index >= len(self.anim.frames):
                self.on_anim_finished()
            self.frame_index = min(frame_index, len(self.anim.frames) - 1)
            self.frame = self.anim.frames[self.frame_index]
    time = property(get_time, set_time)
    @property
    def rect(self):
        """World-space bounding rect of the current frame."""
        x = self.x - self.frame.pivot_x
        y = self.y - self.frame.pivot_y
        return Rect(x, y, x + self.frame.image.width, y + self.frame.image.height)
    def on_anim_finished(self):
        """Hook: called once when a non-looping anim plays past its end."""
        pass
    def on_collide(self, tile):
        """Hook: called when movement is blocked by ``tile``."""
        pass
    def on_moved_tile(self):
        """Hook: called after movement crosses into a different tile."""
        pass
    def can_walk(self, tile):
        """Whether this sprite may enter ``tile``; subclasses refine."""
        return tile.walkable
    def move_with_collision(self, tilemap, dx, dy, speed):
        """Move along (dx, dy), clamped to ``speed``, with per-tile collision.

        Returns True if any movement occurred; when blocked, returns the
        result of ``on_collide`` instead.
        """
        # Slice movement into tile-sized blocks for collision testing
        size = sqrt(dx * dx + dy * dy)
        if not size:
            return False
        dx /= size
        dy /= size
        # Clamp this frame's travel distance to the sprite's speed.
        size = min(size, speed * bacon.timestep)
        did_move = False
        while size > 0:
            # Step at most half a tile at a time so no tile is skipped.
            inc = min(size, tilemap.tile_width / 2, tilemap.tile_height / 2)
            # Move along X
            if dx:
                incx = inc * dx
                tile = tilemap.get_tile_at(self.x + incx, self.y)
                if self.can_walk(tile):
                    self.x += incx
                    did_move = True
                else:
                    # Snap flush against the blocking tile's near edge.
                    if dx > 0:
                        self.x = tile.rect.x1 - 1
                    elif dx < 0:
                        self.x = tile.rect.x2 + 1
                    return self.on_collide(tile)
            # Move along Y
            if dy:
                incy = inc * dy
                tile = tilemap.get_tile_at(self.x, self.y + incy)
                if self.can_walk(tile):
                    self.y += incy
                    did_move = True
                else:
                    if dy > 0:
                        self.y = tile.rect.y1 - 1
                    elif dy < 0:
                        self.y = tile.rect.y2 + 1
                    return self.on_collide(tile)
            size -= inc
        tilemap.update_sprite_position(self)
        # NOTE(review): current_tile is only declared on Character; a plain
        # Sprite reaching this line would raise AttributeError — confirm
        # only Character instances call move_with_collision.
        new_tile = tilemap.get_tile_at(self.x, self.y)
        if new_tile != self.current_tile:
            self.current_tile = new_tile
            self.on_moved_tile()
        return did_move
    def draw(self):
        """Draw the current frame at the pivot, then advance the animation."""
        frame = self.frame
        x = int(self.x - frame.pivot_x)
        y = int(self.y - frame.pivot_y)
        bacon.draw_image(frame.image, x, y)
        # Update animation for next frame
        self.time += bacon.timestep
class Character(Sprite):
    """An animated, path-following actor (player, villager, animal base).

    Movement is driven either directly (player keys) or by following
    ``path``, a list of tiles produced by the tilemap pathfinder.  The
    wolf AI (``update_wolf_motives``) hunts villagers and food when the
    food motive drops below its trigger.
    """
    name = None
    running = False
    walk_speed = 200
    run_speed = 220
    facing = 'down'
    action = 'idle'
    anim_name = 'idle'
    cooldown = 0
    is_wolf = False
    is_dying = False
    # Hunger: 1.0 is full; below the trigger the character seeks food.
    motive_food = 1.0
    motive_food_trigger_wolf = 0.8
    motive_food_trigger_human = 0.2
    motive_food_trigger = motive_food_trigger_human
    max_tilemap_path_size = 500
    distance_player_pickup_animal = 24
    distance_wolf_villager_search = GAME_WIDTH * 1.5
    distance_wolf_villager_attack = 32
    target_villager = None
    eating_villager = False
    current_tile = None
    def __init__(self, anims, x, y, clothing=None):
        self._time = 0.0
        self.anims = anims
        self.update_anim()
        self.set_clothing(clothing)
        super(Character, self).__init__(anims[self.anim_name], x, y)
        self.path = None
        self.target_item = None
    def set_clothing(self, clothing):
        """Set the list of clothing layer names drawn over the body, or None."""
        if clothing:
            self.clothing = [clothing_anims[x] for x in clothing]
        else:
            self.clothing = None
    def draw(self):
        """Draw the body frame, then each clothing layer in the same pose."""
        frame = self.frame
        x = int(self.x - frame.pivot_x)
        y = int(self.y - frame.pivot_y)
        bacon.draw_image(frame.image, x, y)
        if self.clothing:
            for layer in self.clothing:
                anim = layer[self.anim_name]
                frame = anim.frames[self.frame_index]
                bacon.draw_image(frame.image, x, y)
        # Update animation for next frame
        self.time += bacon.timestep
    def wait(self, time):
        """Pause AI decisions for at least ``time`` seconds."""
        self.cooldown = max(self.cooldown, time)
    def update_anim(self):
        """Select the anim for the current action/facing, restarting on change."""
        old_anim_name = self.anim_name
        try:
            self.anim_name = self.action + '_' + self.facing
            self.anim = self.anims[self.anim_name]
        except KeyError:
            # Some actions (e.g. 'death') have no per-facing variants.
            self.anim_name = self.action
            self.anim = self.anims[self.anim_name]
        if old_anim_name != self.anim_name:
            self.time = 0
    def die(self):
        """Start the one-shot death animation; idempotent."""
        if self.is_dying:
            return
        sound_agony2.play()
        self.is_dying = True
        self.action = 'death'
        self.path = None
        self.looping = False
        self.time = 0
        self.update_anim()
        game.menu = None
    def on_anim_finished(self):
        if self.is_dying:
            game.screen = GameOverScreen()
    def walk(self, arrived_func, hueristic_func):
        """Compute and store a path from the current tile; returns the path."""
        self.path = tilemap.get_path(tilemap.get_tile_at(self.x, self.y), arrived_func, hueristic_func, self.max_tilemap_path_size)
        if self.path and len(self.path) > 1 and self.path[0].rect.contains(self.x, self.y):
            # Remove first path component if we're already in the tile and past the center of it
            tx0 = self.path[0].rect.center_x
            ty0 = self.path[0].rect.center_y
            tx1 = self.path[1].rect.center_x
            ty1 = self.path[1].rect.center_y
            if dot(self.x - tx0, self.y - ty0, self.x - tx1, self.y - ty1) <= 0:
                del self.path[0]
        return self.path
    def walk_to_tile(self, tile):
        self.target_item = None
        return self.walk(path_arrived(tile), path_heuristic_player(tile))
    def walk_to(self, x, y):
        tile = tilemap.get_tile_at(x, y)
        return self.walk_to_tile(tile)
    def walk_to_distant_object(self, obj):
        """Walk towards ``obj``; far targets are approached a quarter screen
        at a time to keep the pathfinder's search bounded."""
        if distance(obj, self) > GAME_WIDTH * 0.5:
            dx = obj.x - self.x
            dy = obj.y - self.y
            m = GAME_WIDTH * 0.25 / sqrt(dx * dx + dy * dy)
            dx *= m
            dy *= m
            return self.walk_to(self.x + dx, self.y + dy)
        else:
            return self.walk_to(obj.x, obj.y)
    def walk_to_waypoint(self, target_index=None):
        """Walk to the nearest reachable waypoint (optionally by index)."""
        waypoints.sort(key=lambda v:distance(v, self))
        for waypoint in waypoints:
            if target_index is not None and waypoint.index != target_index:
                continue
            if self.walk_to_distant_object(waypoint):
                return True
    def update_player_movement(self):
        """Direct keyboard movement; cancels any pathing in progress."""
        dx = 0
        dy = 0
        if bacon.Keys.up in bacon.keys:
            dy += -32
        if bacon.Keys.down in bacon.keys:
            dy += 32
        if bacon.Keys.left in bacon.keys:
            dx += -32
        if bacon.Keys.right in bacon.keys:
            dx += 32
        if dx or dy:
            self.update_facing(dx, dy)
            self.move_with_collision(tilemap, dx, dy, self.run_speed if self.running else self.walk_speed)
            self.path = None
            self.target_item = None
            self.action = 'walk'
        elif not self.path:
            self.action = 'idle'
    def update_walk_target_movement(self):
        """Advance along ``path`` one node at a time, firing on_arrive at the end."""
        if not self.path:
            return
        target_tile = self.path[0]
        dx = target_tile.rect.center_x - self.x
        dy = target_tile.rect.center_y - self.y
        self.update_facing(dx, dy)
        if self.move_with_collision(tilemap, dx, dy, self.run_speed if self.running else self.walk_speed):
            self.action = 'walk'
        else:
            # Didn't move, so we've arrived at this path node
            if self.path:
                del self.path[0]
                if not self.path:
                    self.on_arrive(target_tile)
        self.update_anim()
    def on_collide(self, tile):
        if self.is_wolf:
            # Check for destructibles on tile
            for item in tile.items:
                if item.attackable_wolf:
                    item.on_attack()
                    return True
        if self.path:
            if self.path[0] == tile:
                # Arrived at non-walkable tile
                del self.path[0]
                if not self.path:
                    self.on_arrive(tile)
                    return False
            # Path goes through a non-walkable tile, stop walking
            self.path = None
            self.target_item = None
            self.action = 'idle'
        return False
    def on_moved_tile(self):
        if self.eating_villager:
            # Random chance of blood dribble
            if random.random() < 0.3:
                spawn_blood(self.x, self.y, dribble=True)
    def on_arrive(self, tile):
        """Hook: called when the final path node is reached."""
        self.action = 'idle'
    def update_facing(self, dx, dy):
        """Choose a facing from a movement vector; vertical wins only when
        clearly dominant (|dy| > 2|dx|)."""
        if abs(dy) > abs(dx * 2):
            if dy < 0:
                self.facing = 'up'
            elif dy > 0:
                self.facing = 'down'
        elif dx < 0:
            self.facing = 'left'
        elif dx > 0:
            self.facing = 'right'
    def add_food_motive(self, amount):
        self.motive_food = min(self.motive_food + amount, 1.0)
    def update_player_motives(self):
        # Human form gets hungry very slowly.
        self.motive_food = max(self.motive_food - bacon.timestep * 0.002, 0)
    def update_wolf_motives(self):
        """Wolf AI: hunt the targeted villager, then food, else roam."""
        # Wolf hunger drains fast but never reaches zero.
        self.motive_food = max(self.motive_food - bacon.timestep * 0.015, 0.1)
        # If we've reached the villager we're after
        if self.target_villager and distance(self, self.target_villager) < self.distance_wolf_villager_attack:
            # Remove villager's factories
            if self.target_villager.name:
                factories[:] = [f for f in factories if f.owner != self.target_villager.name]
            # Remove villager
            villagers.remove(self.target_villager)
            tilemap.remove_sprite(self.target_villager)
            self.target_villager = None
            self.eating_villager = True
            sound_roar.play()
            sound_agony1.play()
            # Small bite
            self.add_food_motive(0.1)
            spawn_blood(self.x, self.y)
            self.walk_to_waypoint()
            self.wait(0.8)
            return
        if self.cooldown > 0:
            self.cooldown -= bacon.timestep
            self.action = 'idle'
            self.update_anim()
            return
        # If we're standing on food, eat it
        tile = tilemap.get_tile_at(self.x, self.y)
        for item in tile.items:
            if item.food_wolf:
                ConsumeAction(item)()
        if self.motive_food < self.motive_food_trigger:
            if not self.path:
                # Search for nearby villagers
                villagers.sort(key=lambda v:distance(v, self))
                for villager in villagers:
                    if distance(self, villager) < self.distance_wolf_villager_search:
                        if self.walk_to(villager.x, villager.y):
                            self.target_villager = villager
                            return
                # Search for nearby items that are food -- note that the returned path is not optimal, but
                # looks more organic anyway
                if self.walk(path_arrived_wolf_food(), path_hueristic_wolf_search()):
                    return
                # Walk towards nearest villager over multiple screens
                for villager in villagers:
                    # BUGFIX: previously always pathed to villagers[0] on every
                    # iteration; now each villager (nearest first) is tried.
                    if self.walk_to_distant_object(villager):
                        self.target_villager = villager
                        return
                # Couldn't path in direction of any villager, move to nearest waypoint instead
                waypoints.sort(key = lambda v:distance(v, self))
                if self.walk_to_distant_object(waypoints[0]):
                    return
        if not self.path:
            # Random walk
            dx = random.randrange(-3, 3) * 32
            dy = random.randrange(-3, 3) * 32
            self.wait(random.randrange(1, 2))
            self.path = [tilemap.get_tile_at(self.x + dx, self.y + dy)]
        self.update_walk_target_movement()
    def get_drop_tile(self):
        """Find an empty tile to drop an item onto: current tile, then the
        faced tile, then a random empty neighbour; None if all occupied."""
        tile = tilemap.get_tile_at(self.x, self.y)
        if not tile.items:
            return tile
        if self.facing == 'left':
            tile = tilemap.get_tile_at(self.x - 32, self.y)
        elif self.facing == 'right':
            tile = tilemap.get_tile_at(self.x + 32, self.y)
        elif self.facing == 'up':
            tile = tilemap.get_tile_at(self.x, self.y - 32)
        elif self.facing == 'down':
            tile = tilemap.get_tile_at(self.x, self.y + 32)
        if not tile.items:
            return tile
        candidates = [
            tilemap.get_tile_at(self.x, self.y - 32),
            tilemap.get_tile_at(self.x, self.y + 32),
            tilemap.get_tile_at(self.x - 32, self.y),
            tilemap.get_tile_at(self.x + 32, self.y),
            tilemap.get_tile_at(self.x - 32, self.y - 32),
            tilemap.get_tile_at(self.x - 32, self.y + 32),
            tilemap.get_tile_at(self.x + 32, self.y - 32),
            tilemap.get_tile_at(self.x + 32, self.y + 32)
        ]
        random.shuffle(candidates)
        for candidate in candidates:
            if not candidate.items:
                return candidate
        return None
    def get_behind_tile(self):
        """Return the tile directly behind the current facing."""
        dx = dy = 0
        if self.facing == 'left':
            dx = 32
        elif self.facing == 'right':
            dx = -32
        elif self.facing == 'up':
            dy = 32
        else:
            dy = -32
        return tilemap.get_tile_at(self.x + dx, self.y + dy)
class Player(Character):
    """The player character; alternates between human and wolf form."""
    run_speed = 320
    # True after reverting from wolf form until clothes are crafted.
    naked = False
    footsteps_voice = None
    attack_voice = None
    def set_footsteps(self, sound):
        """Loop ``sound`` as footsteps; pass None/falsy to stop."""
        if self.footsteps_voice:
            if self.footsteps_voice._sound == sound:
                return
            self.footsteps_voice.stop()
            self.footsteps_voice = None
        if sound:
            self.footsteps_voice = bacon.Voice(sound, loop=True)
            self.footsteps_voice.play()
    def set_attack_sound(self, sound):
        """Play ``sound`` unless an attack sound is still playing."""
        if self.attack_voice and self.attack_voice.playing:
            return
        self.attack_voice = bacon.Voice(sound)
        self.attack_voice.play()
    def can_walk(self, tile):
        if self.naked and not tile.walkable_entrance:
            # Find owner of this shop, prevent entry if we didn't spawn here
            for villager in villagers:
                if villager.name == tile.entrance_owner:
                    if not villager.spawned_in_shop:
                        return False
        return tile.walkable
    def start_wolf(self):
        """Transform into the wolf: drop inventory, destroy carried fences."""
        sound_monster.play()
        self.motive_food_trigger = self.motive_food_trigger_wolf
        self.is_wolf = True
        self.naked = False
        self.path = None
        self.running = True
        self.action = 'idle'
        self.update_anim()
        self.set_clothing(['Wolf'])
        # Iterate a copy; drop/destroy mutate the inventory list.
        for item in inventory.items[:]:
            if isinstance(item, Fence):
                item.destroy()
            else:
                inventory.drop(item, self.get_drop_tile())
    def end_wolf(self):
        """Revert to human form at dawn, naked, finishing any meal."""
        sound_dawn.play()
        self.motive_food_trigger = self.motive_food_trigger_human
        self.is_wolf = False
        self.path = None
        self.running = False
        self.action = 'idle'
        self.update_anim()
        self.set_clothing(naked_player_clothing)
        self.naked = True
        if self.eating_villager:
            self.on_arrive(tilemap.get_tile_at(self.x, self.y))
        # Check if we're in a shop region, and if so disable the entrance blocker
        # so we can leave
        for villager in villagers:
            if villager.shop_rect and villager.shop_rect.contains(self.x, self.y):
                villager.spawned_in_shop = True
            else:
                villager.spawned_in_shop = False
            # Move villager to center of shop to talk to naked player
            if villager.shop_rect:
                villager.walk_to(villager.shop_rect.center_x, villager.shop_rect.center_y)
    def on_collide(self, tile):
        if not tile.walkable_entrance and player.naked:
            game.show_message('"You can\'t come in here like that, get some clothes on!"')
        return super(Player, self).on_collide(tile)
    def on_arrive(self, tile):
        self.action = 'idle'
        # Finishing a villager meal: scatter bones and restore hunger.
        if self.eating_villager:
            spawn_blood(self.x, self.y)
            spawn_item_on_tile(self.get_drop_tile(), 'Bone', 'BoneRibs')
            spawn_item_on_tile(self.get_drop_tile(), 'Bone', 'BoneSkull')
            spawn_item_on_tile(self.get_drop_tile(), 'Bone', 'BoneLegs')
            spawn_item_on_tile(self.get_drop_tile(), 'Bone', 'Bone')
            sound_crunch1.play()
            self.eating_villager = False
            self.add_food_motive(1.0)
            self.wait(2.5)
        # Check if we arrived on an animal
        for animal in animals:
            if animal.can_pick_up and distance(self, animal) < self.distance_player_pickup_animal:
                if not self.target_item and not inventory.is_full:
                    # Only pick up the animal if we weren't targetting anything.
                    item = animal.item_cls(animal.item_cls.get_default_anim(), 0, 0)
                    inventory.add_item(item)
                    tilemap.remove_sprite(animal)
                    animals.remove(animal)
                return
        # Normal pick_up
        if self.target_item:
            target_item = self.target_item
            self.target_item = None
            target_item.on_player_interact(tile)
class Animal(Character):
    """A wandering animal that flees the player and can be caught in snares."""
    walk_speed = 50
    run_speed = 110
    can_pick_up = False
    run_cooldown = 0
    run_cooldown_time = 1.5 # How long to run before exhaustion
    # Distance to the player at which the animal panics and runs.
    danger_radius = 100
    snare_attract_radius = 512
    snare_catch_radius = 8
    sound = None
    sound_cooldown = -1
    def can_walk(self, tile):
        return tile.walkable and tile.walkable_animal
    def update_animal_movement(self):
        """Per-frame AI: snare capture, flee-from-player, snare attraction,
        otherwise random wandering."""
        if self.running:
            self.run_cooldown -= bacon.timestep
        self.sound_cooldown -= bacon.timestep
        # Check for getting snared
        for snare in snares:
            if not snare.occupied and snare.rect.contains(self.x, self.y):
                if self.sound:
                    self.sound.play()
                snare.occupied = True
                self.snare = snare
                self.x = snare.x
                self.y = snare.y
                tilemap.update_sprite_position(self)
                tilemap.get_tile_at(self.x, self.y).items.append(self)
                animals.remove(self)
                # Morph the live animal in place into its pick-up Item class.
                self.__class__ = self.item_cls
                return
        if not self.path:
            if distance(self, player) < self.danger_radius and self.run_cooldown > 0:
                if self.sound and self.sound_cooldown < 0:
                    self.sound.play()
                    self.sound_cooldown = 5.0
                self.running = True
                self.run_cooldown -= bacon.timestep
                # Flee one random step directly away from the player.
                dx = random.randrange(1, 5) * 32
                dy = random.randrange(0, 5) * 32
                if player.x > self.x:
                    dx = -dx
                if player.y > self.y:
                    dy = -dy
                self.path = [tilemap.get_tile_at(self.x + dx, self.y + dy)]
                self.wait(random.randrange(1, 4) / 4.0)
            else:
                if self.running:
                    # Exhausted: stop and rest.
                    self.running = False
                    self.wait(2)
                return
        if self.cooldown > 0:
            self.cooldown -= bacon.timestep
            return
        # Reset exhaustion
        self.run_cooldown = self.run_cooldown_time
        # Check for nearby snares and walk towards
        for snare in snares:
            if not snare.occupied and distance(snare, self) < self.snare_attract_radius:
                self.running = False
                self.path = [tilemap.get_tile_at(snare.x, snare.y)]
        # Random walk
        if not self.path:
            dx = random.randrange(-4, 4) * 32
            dy = random.randrange(-4, 4) * 32
            self.wait(random.randrange(1, 8))
            self.path = [tilemap.get_tile_at(self.x + dx, self.y + dy)]
        self.update_walk_target_movement()
    def on_collide(self, tile):
        # Blocked: pause briefly and drop out of the fleeing state.
        self.cooldown = 0.1
        self.run_cooldown = 0
# Species tuning: chickens tire quickly and may be picked up directly;
# sheep and cows never tire (999s) and must be snared/butchered instead.
class ChickenAnimal(Animal):
    walk_speed = 50
    run_speed = 110
    can_pick_up = True
    run_cooldown = 0
    run_cooldown_time = 1.5 # How long to run before exhaustion
    danger_radius = 100
    snare_attract_radius = 512
    snare_catch_radius = 8
    sound = sound_chicken
class SheepAnimal(Animal):
    walk_speed = 50
    run_speed = 170
    run_cooldown = 0
    run_cooldown_time = 999 # How long to run before exhaustion
    danger_radius = 200
    snare_attract_radius = 512
    snare_catch_radius = 8
    sound = sound_sheep
class CowAnimal(Animal):
    walk_speed = 50
    run_speed = 170
    run_cooldown = 0
    run_cooldown_time = 999 # How long to run before exhaustion
    danger_radius = 200
    snare_attract_radius = 512
    snare_catch_radius = 8
    sound = sound_cow
class Villager(Character):
    """An NPC that wanders its area; freezes facing the player when naked."""
    walk_speed = 50
    run_speed = 50
    # Set when the player reverted to human inside this villager's shop;
    # lets the (naked) player leave through the entrance.
    spawned_in_shop = False
    shop_rect = None
    def can_walk(self, tile):
        if not tile.walkable_villager or not tile.walkable_entrance:
            return False
        return tile.walkable and tile.walkable_villager
    def update_villager_movement(self):
        if not self.path:
            if self.cooldown > 0:
                self.cooldown -= bacon.timestep
                return
            # Stand still (to scold the player) while the player is naked.
            if not player.naked:
                dx = random.randrange(-4, 4) * 32
                dy = random.randrange(-4, 4) * 32
                self.path = [tilemap.get_tile_at(self.x + dx, self.y + dy)]
                self.wait(random.randrange(1, 8))
        self.update_walk_target_movement()
    def on_arrive(self, tile):
        super(Villager, self).on_arrive(tile)
        if player.naked:
            self.facing = 'down'
            self.update_anim()
_spawn_classes = {}
def spawn(cls):
_spawn_classes[cls.__name__] = cls
return cls
def spawn_item_on_tile(tile, class_name, anim_name=None):
    """Create an item of the named registered class on ``tile``.

    ``anim_name`` optionally selects an anim from ``object_anims``;
    otherwise (or when the name is unknown) the class default anim is
    used.  Returns the new item, or None if the class is unknown, has
    no anim, or ``tile`` is falsy.
    """
    try:
        cls = _spawn_classes[class_name]
    except KeyError:
        print('Missing spawn class %s' % class_name)
        return
    try:
        # anim_name may be None; the KeyError path falls back to the default.
        anim = object_anims[anim_name]
    except KeyError:
        anim = cls.get_default_anim()
    if not anim:
        return
    if tile:
        x = tile.rect.center_x
        y = tile.rect.center_y
        item = cls(anim, x, y)
        tile.items.append(item)
        tilemap.add_sprite(item)
        return item
class Item(Sprite):
    """Base class for world objects the player can pick up, craft with or eat."""
    walkable = True
    can_pick_up = True
    is_consumed_in_recipe = True
    # Anim key in object_anims; falls back to the class name when None.
    anim_name = None
    name = None
    # Hunger restored when eaten in each form (0 = inedible).
    food_human = 0
    food_wolf = 0
    # Extra pathfinding cost for the wolf; 99999 marks impassable obstacles.
    path_cost_wolf = 0
    attackable_wolf = False
    show_durability = False
    @classmethod
    def get_default_anim(cls):
        """Return (and memoise) the class's anim, built from inventory_image
        if it is not already in object_anims."""
        anim_name = cls.anim_name
        if not cls.anim_name:
            anim_name = cls.__name__
        try:
            return object_anims[anim_name]
        except KeyError:
            anim = object_anims[anim_name] = Anim([Frame(cls.inventory_image, 16, 16)])
            return anim
    @classmethod
    def get_name(cls):
        """Display name: the ``name`` attribute or the class name."""
        if cls.name:
            return cls.name
        return cls.__name__
    def destroy(self):
        """Remove this item from the inventory or from its map tile."""
        if self in inventory.items:
            inventory.remove(self)
        else:
            tile = tilemap.get_tile_at(self.x, self.y)
            tile.remove_item(self)
            tilemap.remove_sprite(self)
    def on_player_interact(self, tile):
        """Pick up if possible, otherwise open the crafting menu."""
        if self.can_pick_up and not inventory.is_full:
            inventory.pick_up(self, tile)
        else:
            x, y = camera.world_to_view(self.x, self.y)
            show_craft_menu(self, x, y)
    def on_pick_up(self):
        tilemap.remove_sprite(self)
    def on_dropped(self, tile):
        tile.add_item(self)
        tilemap.add_sprite(self)
    def on_used_in_recipe(self, recipe):
        """Hook: called when this item participated in a crafted recipe."""
        pass
    def on_consumed(self):
        """Feed the player in whichever form can eat this item."""
        if self.food_human and not player.is_wolf:
            player.add_food_motive(self.food_human)
        elif self.food_wolf and player.is_wolf:
            player.add_food_motive(self.food_wolf)
        player.wait(0.5)
        sound_eat.play()
    def on_attack(self):
        """Hook: called when the wolf attacks this item."""
        pass
# --- World objects and resources ------------------------------------------
@spawn
class Tree(Item):
    walkable = False
    can_pick_up = False
    anim_name = 'Tree1.png'
    path_cost_wolf = 99999
    def on_used_in_recipe(self, recipe):
        # Chopping converts the tree into a stump in place.
        self.anim = object_anims['TreeStump']
        self.__class__ = TreeStump
@spawn
class TreeStump(Item):
    name = 'Tree Stump'
    can_pick_up = False
@spawn
class Sapling(Item):
    can_pick_up = False
    anim_name = 'Sapling.png'
@spawn
class BerryPlant(Item):
    name = 'Berry Plant'
    can_pick_up = False
    def on_used_in_recipe(self, recipe):
        # Picking berries leaves an empty plant behind.
        self.anim = object_anims['BerryPlantEmpty']
        self.__class__ = BerryPlantEmpty
@spawn
class BerryPlantEmpty(Item):
    name = 'Berry Plant'
    can_pick_up = False
@spawn
class Berries(Item):
    food_human = 0.05
@spawn
class Reed(Item):
    anim_name = 'Reed.png'
@spawn
class StrangePlant(Item):
    name = 'Rock Flower'
    anim_name = 'StrangePlant.png'
@spawn
class VenusFlyTrap(Item):
    pass
@spawn
class SuspiciousHerbs(Item):
    pass
@spawn
class Mushroom(Item):
    food_human = 0.05
@spawn
class Clothes(Item):
    pass
@spawn
class Wood(Item):
    name = 'Wood'
@spawn
class Boulder(Item):
    walkable = False
    can_pick_up = False
    path_cost_wolf = 99999
    def on_used_in_recipe(self, recipe):
        # Mining destroys the boulder outright (recipe yields the rocks).
        self.destroy()
@spawn
class Rock(Item):
    name = 'Rock'
@spawn
class IronOre(Item):
    name = 'Iron Ore'
@spawn
class IronRock(Item):
    name = 'Iron Rock'
    walkable = False
    can_pick_up = False
    path_cost_wolf = 99999
    def on_used_in_recipe(self, recipe):
        self.destroy()
@spawn
class CoalRock(Item):
    name = 'Coal Rock'
    walkable = False
    can_pick_up = False
    path_cost_wolf = 99999
    def on_used_in_recipe(self, recipe):
        self.destroy()
@spawn
class Coal(Item):
    pass
@spawn
class Bone(Item):
    pass
@spawn
class RawMeat(Item):
    name = 'Raw Meat'
    food_wolf = 0.4
@spawn
class CookedMeat(Item):
    name = 'Cooked Meat'
    food_human = 0.3
@spawn
class Vegetable(Item):
    food_human = 0.05
class Tool(Item):
    """An item that survives crafting but wears down with each use."""
    show_durability = True
    durability = 1.0
    is_consumed_in_recipe = False
    def on_used_in_recipe(self, recipe):
        super(Tool, self).on_used_in_recipe(recipe)
        # Each recipe use removes its tool_durability_effect; the tool
        # breaks (is destroyed) when durability reaches zero.
        self.durability -= recipe.tool_durability_effect
        if self.durability <= 0:
            self.destroy()
# Concrete tools; behaviour (durability, breakage) comes from Tool.
@spawn
class Pick(Tool):
    pass
@spawn
class Axe(Tool):
    pass
@spawn
class Cleaver(Tool):
    pass
@spawn
class Fire(Item):
    """A placed campfire; acts like a fixed tool that burns out after use."""
    walkable = False
    path_cost_wolf = 99999
    can_pick_up = False
    durability = 1.0
    is_consumed_in_recipe = False
    def on_used_in_recipe(self, recipe):
        super(Fire, self).on_used_in_recipe(recipe)
        self.durability -= recipe.tool_durability_effect
        if self.durability <= 0:
            # Burnt out: morph in place into an inert UsedFire.
            self.__class__ = UsedFire
            self.anim = object_anims['UsedFire']
@spawn
class UsedFire(Item):
    can_pick_up = False
@spawn
class Toadstool(Item):
    pass
@spawn
class Fence(Item):
    """A placeable barrier that the wolf can attack and break down.

    Fence tiles auto-connect: each fence picks its anim from
    ``fence_anims`` keyed by which neighbours (U/D/L/R) are also fences.
    """
    walkable = False
    path_cost_wolf = 10
    attackable_wolf = True
    # Seconds of wolf attack needed to destroy (hp drains by timestep).
    hp = 2.5
    fence_anims = {}
    def on_pick_up(self):
        super(Fence, self).on_pick_up()
        self.update_fence_and_adjacent()
    def on_dropped(self, tile):
        super(Fence, self).on_dropped(tile)
        self.update_fence_and_adjacent()
        # Move player into walkable tile; try backward facing direction first
        tile = player.get_behind_tile()
        if tile.walkable:
            player.x = tile.rect.center_x
            player.y = tile.rect.center_y
            player.path = []
            tilemap.update_sprite_position(player)
        sound_craft1.play()
    def update_fence_and_adjacent(self):
        """Refresh this fence's anim and those of the four neighbours."""
        adjacent = [
            tilemap.get_tile_at(self.x - tilemap.tile_width, self.y),
            tilemap.get_tile_at(self.x + tilemap.tile_width, self.y),
            tilemap.get_tile_at(self.x, self.y - tilemap.tile_height),
            tilemap.get_tile_at(self.x, self.y + tilemap.tile_height),
        ]
        self.update_fence()
        for tile in adjacent:
            for item in tile.items:
                if isinstance(item, Fence):
                    item.update_fence()
    def update_fence(self):
        """Choose the anim matching the set of neighbouring fences."""
        fmt = ''
        if self.has_neighbour_fence(self.x, self.y - tilemap.tile_height):
            fmt += 'U'
        if self.has_neighbour_fence(self.x, self.y + tilemap.tile_height):
            fmt += 'D'
        if self.has_neighbour_fence(self.x - tilemap.tile_width, self.y):
            fmt += 'L'
        if self.has_neighbour_fence(self.x + tilemap.tile_width, self.y):
            fmt += 'R'
        self.anim = self.fence_anims[fmt]
    def has_neighbour_fence(self, x, y):
        tile = tilemap.get_tile_at(x, y)
        for item in tile.items:
            if isinstance(item, Fence):
                return True
        return False
    def on_attack(self):
        player.set_attack_sound(sound_attackfence1)
        self.hp -= bacon.timestep
        if self.hp <= 0:
            sound_destroyfence1.play()
            self.destroy()
# Fence upgrades: identical behaviour with progressively more hit points.
@spawn
class StrongFence(Fence):
    name = 'Strong Fence'
    path_cost_wolf = 10
    hp = 5.0
    fence_anims = {}
@spawn
class SteelFence(Fence):
    name = 'Steel Fence'
    path_cost_wolf = 10
    hp = 10.0
    fence_anims = {}
@spawn
class Grass(Item):
    pass
@spawn
class Bread(Item):
    food_human = 0.2
@spawn
class Stick(Item):
    pass
@spawn
class Iron(Item):
    pass
@spawn
class Steel(Item):
    pass
@spawn
class Grass(Item):
    # NOTE(review): duplicate definition — Grass is already declared above;
    # this redefinition shadows the first class and re-registers the name
    # in _spawn_classes. Confirm and remove one of the two.
    pass
@spawn
class Rope(Item):
    pass
@spawn
class Snare(Item):
    """A placed trap that attracts and captures animals (see Animal AI)."""
    # Set truthy once an animal has been caught in this snare.
    occupied = None
    def destroy(self):
        if self in snares:
            snares.remove(self)
        return super(Snare, self).destroy()
    def on_dropped(self, tile):
        super(Snare, self).on_dropped(tile)
        snares.append(self)
        # Move player down; try backward facing direction first
        tile = tilemap.get_tile_at(player.x, player.y + 32)
        if tile.walkable:
            player.x = tile.rect.center_x
            player.y = tile.rect.center_y
            player.path = []
            tilemap.update_sprite_position(player)
        sound_craft1.play()
    def on_pick_up(self):
        # NOTE(review): unlike other items this does not call
        # super().on_pick_up() (which removes the sprite from the tilemap);
        # confirm the sprite is removed elsewhere when a snare is collected.
        try:
            snares.remove(self)
        except ValueError:
            pass
@spawn
class AnimalNet(Snare):
    anim_name = 'Net.png'
    name = 'Animal Net'
class AnimalItem(Item):
    """A captured/carried animal; dropping it releases a live animal."""
    food_wolf = 0.3
    animal_anims = None
    # Live Animal subclass spawned when this item is dropped.
    animal_cls = None
    # The snare this animal was caught in, if any (set by Animal AI).
    snare = None
    def on_dropped(self, tile):
        animal = self.animal_cls(self.animal_anims, tile.rect.center_x, tile.rect.center_y)
        animal.item_cls = self.__class__
        tilemap.add_sprite(animal)
        animals.append(animal)
    def on_consumed(self):
        # Eating a snared animal also consumes its snare.
        if self.snare:
            self.snare.destroy()
            self.snare = None
        spawn_blood(player.x, player.y)
        return super(AnimalItem, self).on_consumed()
    def on_used_in_recipe(self, recipe):
        if self.snare:
            self.snare.destroy()
        spawn_blood(player.x, player.y)
        self.destroy()
        return super(AnimalItem, self).on_used_in_recipe(recipe)
@spawn
class Chicken(AnimalItem):
    animal_cls = ChickenAnimal
    food_wolf = 0.3
    animal_anims = chicken_anims
@spawn
class Sheep(AnimalItem):
    animal_cls = SheepAnimal
    food_wolf = 1.0
    animal_anims = sheep_anims
    can_pick_up = False
@spawn
class Cow(AnimalItem):
    animal_cls = CowAnimal
    food_wolf = 1.0
    animal_anims = cow_anims
    can_pick_up = False
class Recipe(object):
    '''A crafting recipe.

    :param output: class (or iterable of classes) to generate
    :param inputs: dict of input class to required count
    :param text: optional menu label override
    :param sound: optional sound override played on craft
    :param tool_durability_effect: durability removed from tools used
    :param outputs_to_inventory: whether outputs go to the inventory
        (otherwise they are dropped into the world)
    '''
    sound = sound_craft1
    def __init__(self, output, inputs, text=None, sound=None, tool_durability_effect=0.25, outputs_to_inventory=True):
        # BUGFIX: the deprecated collections.Iterable alias was removed in
        # Python 3.10; the ABC lives in collections.abc.
        if not isinstance(output, collections.abc.Iterable):
            output = [output]
        self.outputs = output
        self.inputs = inputs
        self.text = text
        if output:
            self.name = output[0].__name__
        if sound:
            self.sound = sound
        self.tool_durability_effect = tool_durability_effect
        self.outputs_to_inventory = outputs_to_inventory
    def is_input(self, input):
        """True if ``input``'s class is one of this recipe's ingredients."""
        return input.__class__ in self.inputs
    def is_available(self, extra_item):
        """True if the inventory (plus ``extra_item``) satisfies all inputs."""
        for input, count in self.inputs.items():
            if extra_item and extra_item.__class__ is input:
                count -= 1
            if inventory.get_class_count(input) < count:
                return False
        return True
    def on_craft(self):
        """Hook: called after crafting succeeds; plays the recipe sound."""
        self.sound.play()
class ClothesRecipe(Recipe):
    """Special recipe for dressing: only available while the player is naked,
    and crafting applies the clothing instead of producing an item."""
    name = 'Clothes'
    def is_available(self, extra_item):
        if not super(ClothesRecipe, self).is_available(extra_item):
            return False
        if not player.naked:
            return False
        return True
    def on_craft(self):
        player.set_clothing(default_player_clothing)
        player.naked = False
# All crafting recipes, matched against the inventory (plus the item being
# interacted with) by show_craft_menu / Recipe.is_available.
recipes = [
    Recipe([Wood, Wood, Wood], {Axe: 1, Tree: 1}, 'Chop down for wood', tool_durability_effect=0.25, outputs_to_inventory=False),
    Recipe([Coal], {Pick: 1, CoalRock: 1}, 'Mine for coal', tool_durability_effect=0.25),
    Recipe([IronOre, IronOre, IronOre], {Pick: 1, IronRock: 1}, 'Mine for iron ore', tool_durability_effect=0.25, outputs_to_inventory=False),
    Recipe([Rock, Rock], {Pick: 1, Boulder: 1}, 'Smash boulder', tool_durability_effect=0.5, outputs_to_inventory=False),
    Recipe([Iron], {Pick: 1, IronOre: 2}, 'Forge Iron', tool_durability_effect=0.25),
    Recipe(Axe, {Stick: 1, Rock: 1}),
    Recipe(Pick, {Stick: 1, Iron: 1}),
    Recipe(Steel, {Fire: 1, Iron: 1, Coal: 1}, tool_durability_effect=0.2),
    Recipe(Cleaver, {Stick: 1, Steel: 1}),
    Recipe(Fire, {Wood: 2, Coal: 1}, outputs_to_inventory=False),
    Recipe(Fence, {Wood: 2}),
    Recipe(StrongFence, {Fence: 1, Wood: 2}),
    Recipe(SteelFence, {Steel: 4}),
    Recipe(RawMeat, {Chicken: 1}, 'Kill for meat', sound=sound_scream),
    Recipe([RawMeat, RawMeat], {Sheep: 1, Cleaver: 1}, 'Kill for meat', sound=sound_scream, tool_durability_effect=0.25),
    Recipe([RawMeat, RawMeat, RawMeat], {Cow: 1, Cleaver: 1}, 'Kill for meat', sound=sound_scream, tool_durability_effect=0.25),
    Recipe(CookedMeat, {Fire: 1, RawMeat: 1}, 'Cook meat', sound=sound_pickup, tool_durability_effect=0.5),
    #Recipe(Snare, {Rope: 2, Vegetable: 1}),
    Recipe(AnimalNet, {Rope: 2, Rock: 2, Vegetable: 1}),
    Recipe(Rope, {Grass: 3}),
    Recipe(Stick, {Sapling: 1}, "Break off stick", sound=sound_pickup),
    Recipe(Berries, {BerryPlant: 1}, 'Pick berries', sound=sound_pickup),
    ClothesRecipe([], {Clothes: 1}, 'Wear clothes'),
]
def path_arrived(destination):
    """Build an arrival predicate that is true only for the destination tile."""
    return lambda tile: tile is destination
def path_heuristic_player(destination):
    """Build a tile-cost heuristic for player pathing to ``destination``.

    Unwalkable tiles (other than the destination itself) are effectively
    infinite cost; otherwise Manhattan distance plus the tile's own cost.
    """
    def cost(tile):
        if tile is not destination and not tile.walkable:
            return 99999
        manhattan = abs(destination.tx - tile.tx) + abs(destination.ty - tile.ty)
        return manhattan + tile.path_cost
    return cost
def path_arrived_wolf_food():
    """Build an arrival predicate: true for any tile holding wolf-edible food."""
    def func(tile):
        # Returns True on a food tile, otherwise falls through to None
        # (matching the pathfinder's truthiness test).
        if any(item.food_wolf for item in tile.items):
            return True
    return func
def path_hueristic_wolf_search():
    """Build the tile-cost function for the wolf's undirected food search.

    (The name keeps the original 'hueristic' spelling so existing callers
    still resolve.)
    """
    def func(tile):
        # Impassable tiles are effectively infinite cost.
        if not tile._walkable:
            return 99999
        wolf_costs = [item.path_cost_wolf for item in tile.items]
        return max(wolf_costs) if wolf_costs else tile.path_cost
    return func
class Factory(object):
    """Respawns an item of a named class on a fixed tile after a cooldown.

    The cooldown is held at ``cooldown_time`` while the tile is occupied
    and counts down only while the tile is empty.
    """
    def __init__(self, tile, spawn_class_name, owner=None, cooldown_time=70):
        self.tile = tile
        self.spawn_class_name = spawn_class_name
        self.owner = owner
        self.cooldown_time = cooldown_time
        self.cooldown = 0
    def produce(self):
        """Spawn the item now, if the tile is still empty."""
        if not self.tile.items:
            spawn_item_on_tile(self.tile, self.spawn_class_name)
    def update(self):
        """Per-frame tick: hold or drain the cooldown, producing at zero."""
        if self.tile.items:
            self.cooldown = self.cooldown_time
            return
        self.cooldown -= bacon.timestep
        if self.cooldown <= 0:
            self.produce()
factories = []
class Camera(object):
    """A scrolling viewport whose (x, y) is the world point at screen centre."""
    def __init__(self):
        self.x = 0
        self.y = 0
    def apply(self):
        """Push the world-to-view translation onto the render transform."""
        bacon.translate(GAME_WIDTH / 2 - self.x, GAME_HEIGHT / 2 - self.y)
    def view_to_world(self, x, y):
        ox, oy = self.x - GAME_WIDTH / 2, self.y - GAME_HEIGHT / 2
        return x + ox, y + oy
    def world_to_view(self, x, y):
        ox, oy = self.x - GAME_WIDTH / 2, self.y - GAME_HEIGHT / 2
        return x - ox, y - oy
    def clamp_to_bounds(self, bounds):
        """Keep the viewport inside ``bounds`` (max edges win if too small)."""
        half_w, half_h = GAME_WIDTH / 2, GAME_HEIGHT / 2
        if self.x - half_w < bounds.x1:
            self.x = bounds.x1 + half_w
        if self.x + half_w > bounds.x2:
            self.x = bounds.x2 - half_w
        if self.y - half_h < bounds.y1:
            self.y = bounds.y1 + half_h
        if self.y + half_h > bounds.y2:
            self.y = bounds.y2 - half_h
    def get_bounds(self):
        """World-space rect currently covered by the viewport."""
        half_w, half_h = GAME_WIDTH / 2, GAME_HEIGHT / 2
        return Rect(self.x - half_w, self.y - half_h, self.x + half_w, self.y + half_h)
class MenuHint(object):
    """Base class for tooltip boxes drawn beside menu items.

    Subclasses populate `self.lines` with glyph layouts and call
    `layout()`; `draw()` then renders a dark panel at (self.x, self.y)
    and stacks the lines upward from that point.
    """
    def __init__(self):
        self.lines = []
    def layout(self):
        # Bounding box of all stacked lines.
        self.height = sum(line.content_height for line in self.lines)
        self.width = max(line.content_width for line in self.lines)
    def draw(self):
        x = self.x
        y = self.y
        # Dark backing panel; the rect extends *upward* (y - height).
        bacon.set_color(0.2, 0.2, 0.2, 1.0)
        bacon.fill_rect(x, y, x + self.width, y - self.height)
        bacon.set_color(1, 1, 1, 1)
        for line in self.lines:
            line.x = x
            line.y = y
            bacon.draw_glyph_layout(line)
            # Move up one line height for the next entry.
            y -= line.content_height
class MenuRecipeHint(MenuHint):
def __init__(self, recipe, extra_item):
self.x = 0
self.y = 0
self.lines = []
style = bacon.Style(font_ui)
for (cls, count) in recipe.inputs.items():
satisfied_count = count
if extra_item and isinstance(extra_item, cls):
satisfied_count -= 1
satisfied = inventory.get_class_count(cls) >= satisfied_count
text = '[%s] %dx %s' % ('X' if satisfied else ' ', count, cls.get_name())
run = bacon.GlyphRun(style, text)
self.lines.append(bacon.GlyphLayout([run], 0, 0, width=280, height=None, align=bacon.Alignment.left, vertical_align=bacon.VerticalAlignment.bottom))
self.layout()
self.content_width = max(line.content_width for line in self.lines)
class MenuTextHint(MenuHint):
def __init__(self, text):
self.x = 0
self.y = 0
self.lines = []
style = bacon.Style(font_ui)
run = bacon.GlyphRun(style, text)
self.lines.append(bacon.GlyphLayout([run], 0, 0, width=280, height=None, align=bacon.Alignment.left, vertical_align=bacon.VerticalAlignment.bottom))
self.layout()
self.content_width = self.lines[0].content_width
class MenuItem(object):
def __init__(self, text, x, y, func, disabled=False, hint=None):
self.text = text
self.func = func
self.disabled = disabled
self.hint = hint
style = bacon.Style(font_ui)
width = 250
self.glyph_layout = bacon.GlyphLayout([bacon.GlyphRun(style, text)],
x, y,
width, style.font.descent - style.font.ascent,
align=bacon.Alignment.left,
vertical_align=bacon.VerticalAlignment.top)
self.rect = Rect(x, y, x + self.glyph_layout.content_width, y + self.glyph_layout.content_height)
def draw(self):
if self.rect.contains(bacon.mouse.x, bacon.mouse.y):
self.draw_hint()
bacon.set_color(0.6, 0.6, 0.6, 1.0)
else:
bacon.set_color(0.3, 0.3, 0.3, 1.0)
self.rect.fill()
if self.disabled:
bacon.set_color(0.7, 0.7, 0.7, 1.0)
else:
bacon.set_color(1.0, 1.0, 1.0, 1.0)
bacon.draw_glyph_layout(self.glyph_layout)
def draw_hint(self):
if self.hint:
if self.rect.x2 + self.hint.content_width < GAME_WIDTH:
self.hint.x = self.rect.x2
else:
self.hint.x = self.rect.x1 - self.hint.content_width
self.hint.y = self.rect.y2
self.hint.draw()
class Menu(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.item_y = y
self.items = []
self.rect = None
def add(self, text, func=None, disabled=False, hint=None):
item = MenuItem(text, 0, self.item_y, func, disabled=disabled, hint=hint)
self.items.append(item)
self.item_y = item.rect.y2
self.rect = None
def layout(self):
width = max(item.rect.width for item in self.items)
height = sum(item.rect.height for item in self.items)
self.y -= height
self.rect = Rect(self.x, self.y, self.x + width, self.y + height)
for item in self.items:
item.rect.y1 -= height
item.rect.y2 -= height
item.rect.x1 = self.x
item.rect.x2 = item.rect.x1 + width
item.glyph_layout.x = item.rect.x1
item.glyph_layout.y = item.rect.y1
def on_mouse_button(self, button, pressed):
if not self.rect:
self.layout()
if self.rect.contains(bacon.mouse.x, bacon.mouse.y):
for item in self.items:
if item.rect.contains(bacon.mouse.x, bacon.mouse.y):
if item.func:
item.func()
game.menu = None
return
if pressed:
game.menu = None
def draw(self):
if not self.rect:
self.layout()
for item in self.items:
item.draw()
class DropAction(object):
    """Callable menu action: drop `item` from the inventory onto the
    player's current drop tile (no-op when no valid tile exists)."""

    def __init__(self, item):
        self.item = item

    def __call__(self):
        target = player.get_drop_tile()
        if target:
            inventory.drop(self.item, target)
class PickUpAction(object):
    """Callable menu action: move `item` from `tile` into the inventory."""

    def __init__(self, item, tile):
        self.tile = tile
        self.item = item

    def __call__(self):
        inventory.pick_up(self.item, self.tile)
class CraftAction(object):
    """Callable menu action: craft `recipe`, consuming/using `item` as the
    item the menu was opened on."""

    def __init__(self, recipe, item):
        self.item = item
        self.recipe = recipe

    def __call__(self):
        inventory.craft(self.recipe, self.item)
class ConsumeAction(object):
    """Callable menu action: consume (eat) `item` — destroy it, then fire
    its consumption hook."""

    def __init__(self, item):
        self.item = item

    def __call__(self):
        consumed = self.item
        consumed.destroy()
        consumed.on_consumed()
def show_craft_menu(item, x, y):
game.menu = Menu(x - 16, y - 32)
extra_item = item if not item in inventory.items else None
for recipe in recipes:
if recipe.is_input(item):
text = recipe.text
hint = MenuRecipeHint(recipe, extra_item)
if not text:
text = 'Craft %s' % recipe.name
if recipe.is_available(extra_item):
game.menu.add(text, CraftAction(recipe, item), hint=hint)
else:
game.menu.add(text, disabled=True, hint=hint)
if item.food_human:
game.menu.add('Eat %s' % item.get_name(), ConsumeAction(item))
elif item.food_wolf:
game.menu.add('Eat %s' % item.get_name(), disabled=True, hint=MenuTextHint('Can be eaten during full moon'))
if item in inventory.items:
tile = player.get_drop_tile()
if isinstance(item, Fence) and tile is tilemap.get_tile_at(player.x, player.y):
# Ensure position behind player is free if drop tile is player
if not player.get_behind_tile().walkable:
tile = None
if tile:
game.menu.add('Drop %s' % item.get_name(), DropAction(item))
else:
game.menu.add('Drop %s' % item.get_name(), disabled=True)
elif item.can_pick_up:
if inventory.is_full:
game.menu.add('Pick up %s' % item.get_name(), disabled=True, hint=MenuTextHint('Inventory full'))
else:
game.menu.add('Pick up %s' % item.get_name(), PickUpAction(item, tilemap.get_tile_at(item.x, item.y)))
if not game.menu.items:
game.menu = None
else:
sound_click.play()
class Inventory(object):
slots = 6
slot_image = load_image('InventorySlot.png')
def __init__(self):
self.items = []
self.item_size_x = 44
self.x = int(GAME_WIDTH / 2 - self.slots * self.item_size_x / 2)
self.y = GAME_HEIGHT - 32
@property
def is_full(self):
return len(self.items) >= self.slots
def layout(self):
for (i, item) in enumerate(self.items):
item.x = self.x + i * self.item_size_x
item.y = self.y
def get_class_count(self, input_class):
return len([i for i in self.items if i.__class__ is input_class])
def get_item_at(self, x, y):
for item in self.items:
if item.rect.contains(x, y):
return item
def pick_up(self, item, tile):
if self.is_full:
return
tile.remove_item(item)
item.on_pick_up()
self.add_item(item)
self.layout()
sound_pickup.play()
def add_item(self, item):
if self.is_full:
tile = player.get_drop_tile()
if tile:
item.on_dropped(tile)
else:
self.items.append(item)
self.layout()
def drop(self, item, tile):
self.items.remove(item)
if tile:
item.on_dropped(tile)
self.layout()
sound_drop.play()
def remove(self, item):
self.items.remove(item)
self.layout()
def craft(self, recipe, initial_item):
if initial_item in self.items:
slot_index = self.items.index(initial_item)
else:
slot_index = len(self.items)
new_items = []
for output in recipe.outputs:
crafted_item = output(output.get_default_anim(), 0, 0)
self.items.insert(slot_index, crafted_item)
if recipe.outputs_to_inventory:
new_items.append(crafted_item)
else:
self.drop(crafted_item, player.get_drop_tile())
for item_class, count in recipe.inputs.items():
for i in range(count):
if initial_item and initial_item.__class__ is item_class:
if initial_item.is_consumed_in_recipe:
if initial_item in self.items:
self.items.remove(initial_item)
initial_item.on_used_in_recipe(recipe)
initial_item = None
else:
for item in self.items:
if item.__class__ is item_class:
if item.is_consumed_in_recipe:
self.items.remove(item)
item.on_used_in_recipe(recipe)
break
while len(self.items) > self.slots:
if new_items:
self.drop(new_items[-1], player.get_drop_tile())
del new_items[-1]
else:
self.drop(self.items[-1], player.get_drop_tile())
recipe.on_craft()
self.layout()
def draw(self):
bacon.set_color(1, 1, 1, 1)
for i in range(self.slots):
bacon.draw_image(self.slot_image, self.x + i * self.item_size_x - self.slot_image.width / 2, self.y - self.slot_image.height / 2)
for item in self.items:
if item.show_durability:
bacon.set_color(0.5, 0, 0, 1.0)
Rect(item.x - 16, item.y + 16, item.x - 16 + 32 * item.durability, item.y + 18).fill()
bacon.set_color(1, 1, 1, 1)
bacon.draw_image(item.inventory_image, item.x - 16, item.y - 16, item.x + 16, item.y + 16)
def on_mouse_button(self, button, pressed):
if pressed and button == bacon.MouseButtons.left:
item = self.get_item_at(bacon.mouse.x, bacon.mouse.y)
if item:
show_craft_menu(item, item.x, item.y)
return True
return False
object_anims = {}
object_sprite_data = spriter.parse('res/Objects.scml')
for folder in object_sprite_data.folders:
for file in folder.files:
image = load_image(file.name)
frame = Frame(image, file.pivot_x, file.pivot_y)
anim = Anim([frame])
object_anims[file.name] = anim
object_anims['Fire'] = spritesheet_anim('Item-Fire.png', 1, 4, 16, 16)
object_anims['Fire'].time_per_frame = 0.1
blood_images = []
blood_dribble_images = []
blood_layer = None
def spawn_blood(x, y, dribble=False):
    """Stamp a random blood decal into the blood tile layer at world (x, y).

    A tile keeps its first decal: if one is already present nothing happens.
    `dribble` selects from the dribble image set instead of the splat set.
    """
    ti = tilemap.get_tile_index(x, y)
    if blood_layer.images[ti]:
        # Tile already stained -- never overwrite an existing decal.
        return
    if dribble:
        image = random.choice(blood_dribble_images)
    else:
        image = random.choice(blood_images)
    blood_layer.images[ti] = image
tilemap = tiled.parse('res/Tilemap.tmx')
for tileset in tilemap.tilesets:
for image in tileset.images:
if hasattr(image, 'properties'):
props = image.properties
if 'Anim' in props:
if props['Anim'] not in object_anims:
object_anims[props['Anim']] = Anim([Frame(image, 16, 16)])
if 'Class' in props:
_spawn_classes[props['Class']].inventory_image = image
if 'Fence' in props:
fmt = props['Fence']
Fence.fence_anims[fmt] = Anim([Frame(image, 16, 16)])
if 'StrongFence' in props:
fmt = props['StrongFence']
StrongFence.fence_anims[fmt] = Anim([Frame(image, 16, 16)])
if 'SteelFence' in props:
fmt = props['SteelFence']
SteelFence.fence_anims[fmt] = Anim([Frame(image, 16, 16)])
if 'Blood' in props:
blood_images.append(image)
if 'BloodDribble' in props:
blood_dribble_images.append(image)
Fence.fence_anims[''] = Fence.get_default_anim()
StrongFence.fence_anims[''] = StrongFence.get_default_anim()
SteelFence.fence_anims[''] = SteelFence.get_default_anim()
class Tutorial(object):
    """A contextual message, optionally tied to a world-space trigger rect."""

    # Class-level flag shared across all instances.
    shown = False

    def __init__(self, text, rect):
        self.rect = rect
        self.text = text
player = Player(clothing_anims['Body'], 0, 0, default_player_clothing)
villagers = []
animals = []
waypoints = []
snares = []
tilemap.add_sprite(player)
inventory = Inventory()
tutorials = []
for layer in tilemap.layers:
if layer.name == 'Spawns':
tilemap.layers.remove(layer)
for i, image in enumerate(layer.images):
if image and hasattr(image, 'properties'):
tile = tilemap.tiles[i]
class_name = image.properties.get('Class')
anim_name = image.properties.get('Anim')
if class_name == 'Chicken':
animal = ChickenAnimal(chicken_anims, tile.rect.center_x, tile.rect.center_y)
animal.item_cls = Chicken
animals.append(animal)
tilemap.add_sprite(animal)
elif class_name == 'Sheep':
animal = SheepAnimal(sheep_anims, tile.rect.center_x, tile.rect.center_y)
animal.item_cls = Sheep
animals.append(animal)
tilemap.add_sprite(animal)
elif class_name == 'Cow':
animal = CowAnimal(cow_anims, tile.rect.center_x, tile.rect.center_y)
animal.item_cls = Cow
animals.append(animal)
tilemap.add_sprite(animal)
elif class_name:
spawn_item_on_tile(tile, class_name, anim_name)
factory_class = image.properties.get('FactoryClass')
if factory_class:
owner = image.properties.get('Owner')
cooldown = int(image.properties.get('Cooldown', 70))
factories.append(Factory(tile, factory_class, owner, cooldown))
if image.properties.get('Waypoint'):
waypoint = Waypoint(tile.rect.center_x, tile.rect.center_y)
waypoints.append(waypoint)
elif layer.name == 'Blood':
blood_layer = layer
camera = Camera()
villager_clothing = dict(
Baker = ['BrownSkirt', 'WhiteShirt'],
Butcher = ['BrownShoes', 'GreenPants', 'PurpleJacket', 'Hood'],
Tailor = ['BrownShoes', 'BrownSkirt', 'WhiteShirt', 'HairBlonde'],
Carpenter = ['MetalBoots', 'BrownSkirt', 'ChainTorso', 'MetalHat'],
Blacksmith = ['MetalBoots', 'MetalPants', 'ChainTorso', 'ChainHood'],
Farmer = ['GreenPants', 'MetalHat']
)
for object_layer in tilemap.object_layers:
for obj in object_layer.objects:
if obj.name == 'PlayerStart':
player.x = obj.x
player.y = obj.y
tilemap.update_sprite_position(player)
elif obj.name == 'Villager':
villager = Villager(clothing_anims['Body'], obj.x, obj.y, villager_clothing.get(obj.type))
villager.name = obj.type
villagers.append(villager)
tilemap.add_sprite(villager)
elif obj.name == 'Tutorial':
tutorial = Tutorial(obj.type, Rect(obj.x, obj.y, obj.x + obj.width, obj.y + obj.height))
tutorial.condition = obj.properties.get('Condition')
tutorial.owner = obj.properties.get('Owner')
tutorials.append(tutorial)
elif obj.name == 'ShopRegion':
for villager in villagers:
if villager.name == obj.type:
villager.shop_rect = Rect(obj.x, obj.y, obj.x + obj.width, obj.y + obj.height)
class GameStartScreen(bacon.Game):
def __init__(self):
sound_growl1.play()
def on_tick(self):
self.moon = moon.Moon()
self.moon.x = GAME_WIDTH / 2
self.moon.y = GAME_HEIGHT / 2
self.moon.angle = 0.0
bacon.clear(0, 0, 0, 1)
bacon.set_color(0.6, 0.6, 0.6, 1.0)
self.moon.draw()
bacon.set_color(1, 0, 0, 1)
bacon.draw_string(font_ui, 'Monthly Visitor',
0, 0, GAME_WIDTH, GAME_HEIGHT,
align = bacon.Alignment.center,
vertical_align = bacon.VerticalAlignment.center)
bacon.set_color(1, 1, 1, 1)
bacon.draw_string(font_ui, 'A game by Alex Holkner and Amanda Schofield',
0, GAME_HEIGHT / 2 + 24, GAME_WIDTH,
align = bacon.Alignment.center,
vertical_align = bacon.VerticalAlignment.center)
bacon.set_color(1, 1, 1, 1)
bacon.draw_string(font_ui, 'Click to start',
0, GAME_HEIGHT - 4, GAME_WIDTH,
align = bacon.Alignment.center,
vertical_align = bacon.VerticalAlignment.bottom)
def on_mouse_button(self, button, pressed):
game.screen = None
game.start()
class GameOverScreen(bacon.Game):
    """Static end screen shown after the player dies."""
    def __init__(self):
        pass
    def on_tick(self):
        # Black background, white centred death message.
        bacon.clear(0, 0, 0, 1)
        bacon.set_color(1, 1, 1, 1)
        bacon.draw_string(font_ui, 'You have died.',
            0, 0, GAME_WIDTH, GAME_HEIGHT,
            align = bacon.Alignment.center,
            vertical_align = bacon.VerticalAlignment.center)
FULL_MOON_TIME = 30.0
MONTH_TIME = 180.0
lunar_names = [
'Waxing Gibbous',
'First Quarter',
'Waxing Crescent',
'New Moon',
'Waning Crescent',
'Third Quarter',
'Waning Gibbous',
'Waning Gibbous',
]
class Game(bacon.Game):
def __init__(self):
self.menu = None
self.screen = GameStartScreen()
self.tutorial = None
self.moon = moon.Moon()
self.moon.x = GAME_WIDTH - 36
self.moon.y = 36
self.moon.radius = 32
self.message = None
self.message_time = 0.0
self.tutorial_food_trigger = False
self.tutorial_full_moon = False
self.tutorial_end_full_moon = False
self.game_time = 0
def start(self):
self.lunar_cycle = 0.0
self.full_moon_time = 0.0
self.full_moon = False
self.curtain = 0.0
player.motive_food = 1.0
sound_dawn.play()
@property
def lunar_name(self):
if self.lunar_cycle == 0.0:
return 'FULL MOON'
else:
return lunar_names[int(self.lunar_cycle * 8.0)]
def on_tick(self):
self.game_time += bacon.timestep
update_tweens()
if self.screen:
self.screen.on_tick()
return
if self.message_time > 0.0:
self.message_time -= bacon.timestep
else:
self.message = None
# Lunar cycle
if not player.is_dying:
if self.full_moon:
self.full_moon_time -= bacon.timestep
if self.full_moon_time < 0.0:
if not self.tutorial_end_full_moon:
self.show_message("What happened?? Where am I?")
self.tutorial_end_full_moon = True
self.full_moon = False
player.end_wolf()
tween(self, 'curtain', 0.0, 0.3)
else:
self.lunar_cycle += bacon.timestep / MONTH_TIME
if self.lunar_cycle >= 0.95 and not self.tutorial_full_moon:
self.show_message("The moon... is calling to me. I can feel a change... within me...")
self.tutorial_full_moon = True
if self.lunar_cycle >= 1.0:
self.lunar_cycle = 0.0
self.full_moon_time = FULL_MOON_TIME
self.full_moon = True
player.start_wolf()
tween(self, 'curtain', 1.0, 0.3)
self.menu = None
# AI
for animal in animals:
animal.update_animal_movement()
for villager in villagers:
villager.update_villager_movement()
if not player.is_dying:
if player.is_wolf:
player.update_wolf_motives()
else:
player.update_player_motives()
#player.update_player_movement()
player.update_walk_target_movement()
if not self.full_moon:
for factory in factories:
factory.update()
if player.motive_food <= 0:
player.die()
if player.action == 'walk':
if player.is_wolf:
player.set_footsteps(sound_footsteps2)
else:
player.set_footsteps(sound_footsteps1)
player.footsteps_voice.gain = 0.3
else:
player.set_footsteps(None)
# Camera
camera.x = int(player.x)
camera.y = int(player.y)
camera.clamp_to_bounds(tilemap.get_bounds())
# Rendering
bacon.clear(0.8, 0.7, 0.6, 1.0)
bacon.push_transform()
camera.apply()
self.draw_world()
bacon.pop_transform()
self.draw_ui()
def draw_world(self):
bacon.set_color(1, 1, 1, 1)
tilemap.draw(camera.get_bounds())
bacon.set_color(0, 0, 1, 1)
#tilemap.get_tile_rect(player.x, player.y).draw()
bacon.set_color(1, 0, 0, 1)
#tilemap.get_bounds().draw()
def draw_ui(self):
bacon.set_color(1, 1, 1, 1)
inventory.draw()
if self.curtain:
bacon.set_color(0, 0, 0, 1)
bacon.fill_rect(0, 0, GAME_WIDTH, self.curtain * 60)
bacon.fill_rect(0, GAME_HEIGHT, GAME_WIDTH, GAME_HEIGHT - self.curtain * 60)
bacon.set_color(1, 1, 1, 1)
self.moon.cycle = self.lunar_cycle
self.moon.draw()
self.draw_tutorial()
bacon.set_color(1, 1, 1, 1)
#bacon.draw_string(font_ui, 'Lunar: %f' % self.lunar_cycle, GAME_WIDTH, 64, align = bacon.Alignment.right)
#bacon.draw_string(font_ui, self.lunar_name, GAME_WIDTH, 120, align = bacon.Alignment.right)
bacon.set_color(1, 1, 1, 1)
if player.motive_food < player.motive_food_trigger:
if not self.tutorial_food_trigger and not player.is_wolf:
game.show_message("I'm so... hungry... must find something to eat!")
self.tutorial_food_trigger = True
if int(self.game_time * 4) % 2 == 0:
bacon.set_color(0, 0, 0, 0)
stamina_size = 86
bacon.draw_string(font_ui, 'Stamina', GAME_WIDTH - 2, 96, align=bacon.Alignment.right)
bacon.set_color(0.7, 0.7, 0.7, 1.0)
x = GAME_WIDTH - stamina_size - 4
y = 104
Rect(x - 2, y - 2, x + stamina_size + 2, y + 4).fill()
bacon.set_color(0.4, 0, 0, 1.0)
Rect(x, y, x + stamina_size * player.motive_food, y + 2).fill()
if self.menu:
self.menu.draw()
def draw_tutorial(self):
tutorial = None
for t in tutorials:
if not player.is_wolf and t.rect.contains(player.x, player.y):
if t.condition == 'Naked' and not player.naked:
continue
if t.owner:
if len([v for v in villagers if v.name == t.owner]) == 0:
continue
tutorial = t
break
if self.message:
tutorial = self.message
if tutorial != self.tutorial:
if self.tutorial and self.tutorial in tutorials:
tutorials.remove(self.tutorial)
self.tutorial = tutorial
if tutorial:
sound_chime.play()
style = bacon.Style(font_ui)
runs = [bacon.GlyphRun(style, tutorial.text)]
tutorial.glyph_layout = bacon.GlyphLayout(runs, 32, GAME_HEIGHT - 64, GAME_WIDTH - 64, None, align = bacon.Alignment.center, vertical_align = bacon.VerticalAlignment.bottom)
if tutorial:
bacon.set_color(0, 0, 0, 0.8)
g = tutorial.glyph_layout
r = Rect(g.x + g.width / 2- g.content_width / 2, g.y, g.x + g.width / 2 + g.content_width / 2, g.y - g.content_height)
r.fill()
bacon.set_color(1, 1, 1, 1)
bacon.draw_glyph_layout(tutorial.glyph_layout)
def show_message(self, message, time=5.0):
self.message = Tutorial(message, None)
game.message_time = time
def on_key(self, key, pressed):
if self.screen:
self.screen.on_key(key, pressed)
return
if ENABLE_CHEATS:
if pressed and key == bacon.Keys.w:
player.is_wolf = not player.is_wolf
if pressed and key == bacon.Keys.minus:
player.motive_food -= 0.2
if pressed and key == bacon.Keys.plus:
player.motive_food += 0.2
if pressed and key == bacon.Keys.right_bracket:
if self.full_moon:
self.full_moon_time = 0
else:
self.lunar_cycle += 0.25
if pressed and key == bacon.Keys.left_bracket:
self.lunar_cycle -= 0.25
def on_mouse_button(self, button, pressed):
if self.screen:
self.screen.on_mouse_button(button, pressed)
return
if self.menu:
self.menu.on_mouse_button(button, pressed)
return
if not player.is_wolf and not player.is_dying:
if inventory.on_mouse_button(button, pressed):
return
if pressed and button == bacon.MouseButtons.left:
x, y = camera.view_to_world(bacon.mouse.x, bacon.mouse.y)
ti = tilemap.get_tile_index(x, y)
tile = tilemap.tiles[ti]
if not player.walk_to_tile(tile):
# Path find failed, walk in straight line
player.path = [tile]
if tile.items:
player.target_item = tile.items[-1]
game = Game()
bacon.run(game) | mit | 1,221,519,002,521,031,700 | 30.716981 | 189 | 0.551396 | false | 3.416135 | false | false | false |
python/pythondotorg | events/views.py | 3 | 4793 | import datetime
from django.contrib import messages
from django.core.mail import BadHeaderError
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse_lazy
from django.utils import timezone
from django.views.generic import DetailView, ListView, FormView
from pydotorg.mixins import LoginRequiredMixin
from .models import Calendar, Event, EventCategory, EventLocation
from .forms import EventForm
class CalendarList(ListView):
model = Calendar
class EventListBase(ListView):
    """Shared base for event listings: paginated Events plus the featured
    event and category/location sidebars in the template context."""
    model = Event
    paginate_by = 6
    def get_object(self, queryset=None):
        # Subclasses override this to expose their filter object (category,
        # location, date) to the template; the base list has none.
        return None
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        featured_events = self.get_queryset().filter(featured=True)
        try:
            context['featured'] = featured_events[0]
        except IndexError:
            # No featured event -- the template simply omits the section.
            pass
        context['event_categories'] = EventCategory.objects.all()[:10]
        context['event_locations'] = EventLocation.objects.all()[:10]
        context['object'] = self.get_object()
        return context
class EventHomepage(ListView):
    """ Main Event Landing Page """
    template_name = 'events/event_list.html'
    def get_queryset(self):
        # Upcoming events only, soonest first.
        return Event.objects.for_datetime(timezone.now()).order_by('occurring_rule__dt_start')
class EventDetail(DetailView):
    """Single-event page; context includes look-ahead dates (7/30/90/365
    days after the event's next occurrence) when one exists."""
    model = Event

    def get_queryset(self):
        return super().get_queryset().select_related()

    def get_context_data(self, **kwargs):
        data = super().get_context_data(**kwargs)
        next_time = data['object'].next_time
        if next_time:
            start = next_time.dt_start
            data.update({
                'next_%d' % days: start + datetime.timedelta(days=days)
                for days in (7, 30, 90, 365)
            })
        return data
class EventList(EventListBase):
def get_queryset(self):
return Event.objects.for_datetime(timezone.now()).filter(calendar__slug=self.kwargs['calendar_slug']).order_by('occurring_rule__dt_start')
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['events_today'] = Event.objects.until_datetime(timezone.now()).filter(calendar__slug=self.kwargs['calendar_slug'])[:2]
context['calendar'] = get_object_or_404(Calendar, slug=self.kwargs['calendar_slug'])
return context
class PastEventList(EventList):
template_name = 'events/event_list_past.html'
def get_queryset(self):
return Event.objects.until_datetime(timezone.now()).filter(calendar__slug=self.kwargs['calendar_slug'])
class EventListByDate(EventList):
    """Events active on the specific date captured in the URL kwargs."""

    def get_object(self):
        # URL captures arrive as strings; the date itself is the filter object.
        ymd = (int(self.kwargs[part]) for part in ('year', 'month', 'day'))
        return datetime.date(*ymd)

    def get_queryset(self):
        target_date = self.get_object()
        return Event.objects.for_datetime(target_date).filter(
            calendar__slug=self.kwargs['calendar_slug'])
class EventListByCategory(EventList):
def get_object(self, queryset=None):
return get_object_or_404(EventCategory, calendar__slug=self.kwargs['calendar_slug'], slug=self.kwargs['slug'])
def get_queryset(self):
qs = super().get_queryset()
return qs.filter(categories__slug=self.kwargs['slug'])
class EventListByLocation(EventList):
def get_object(self, queryset=None):
return get_object_or_404(EventLocation, calendar__slug=self.kwargs['calendar_slug'], pk=self.kwargs['pk'])
def get_queryset(self):
qs = super().get_queryset()
return qs.filter(venue__pk=self.kwargs['pk'])
class EventCategoryList(ListView):
model = EventCategory
paginate_by = 30
def get_queryset(self):
return self.model.objects.filter(calendar__slug=self.kwargs['calendar_slug'])
def get_context_data(self, **kwargs):
kwargs['event_categories'] = self.get_queryset()[:10]
return super().get_context_data(**kwargs)
class EventLocationList(ListView):
model = EventLocation
paginate_by = 30
def get_queryset(self):
return self.model.objects.filter(calendar__slug=self.kwargs['calendar_slug'])
class EventSubmit(LoginRequiredMixin, FormView):
    """User-facing event submission form: on success the event is emailed
    (via the form) rather than created directly."""
    template_name = 'events/event_form.html'
    form_class = EventForm
    success_url = reverse_lazy('events:event_thanks')
    def form_valid(self, form):
        try:
            form.send_email(self.request.user)
        except BadHeaderError:
            # Header injection attempt (newline in a header field):
            # report the error and bounce the user back to the form.
            messages.add_message(self.request, messages.ERROR, 'Invalid header found.')
            return redirect('events:event_submit')
        return super().form_valid(form)
| apache-2.0 | -130,566,504,485,138,610 | 31.385135 | 146 | 0.664511 | false | 3.849799 | false | false | false |
opesci/devito | devito/passes/iet/engine.py | 1 | 5796 | from collections import OrderedDict
from functools import partial, wraps
from sympy.tensor.indexed import IndexException
from devito.ir.iet import Call, FindNodes, MetaCall, Transformer
from devito.tools import DAG, as_tuple, filter_ordered, timed_pass
__all__ = ['Graph', 'iet_pass']
class Graph(object):
"""
A special DAG representing call graphs.
The nodes of the graph are IET Callables; an edge from node `a` to node `b`
indicates that `b` calls `a`.
The `apply` method may be used to visit the Graph and apply a transformer `T`
to all nodes. This may change the state of the Graph: node `a` gets replaced
by `a' = T(a)`; new nodes (Callables), and therefore new edges, may be added.
"""
def __init__(self, iet, *efuncs):
# Internal "known" functions
self.efuncs = OrderedDict([('root', iet)])
self.efuncs.update(OrderedDict([(i.name, i) for i in efuncs]))
# Foreign functions
self.ffuncs = []
self.dimensions = []
self.includes = []
self.headers = []
@property
def root(self):
return self.efuncs['root']
@property
def funcs(self):
retval = [MetaCall(v, True) for k, v in self.efuncs.items() if k != 'root']
retval.extend([MetaCall(i, False) for i in self.ffuncs])
return tuple(retval)
    def _create_call_graph(self):
        """Build a DAG over the known Callables, rooted at 'root'.

        Performs a BFS from 'root', adding an edge ``callee -> caller``
        for every Call found in a caller's body.  Calls whose name is not
        in ``self.efuncs`` (foreign functions, e.g. MPI) are skipped.
        """
        dag = DAG(nodes=['root'])
        queue = ['root']
        while queue:
            caller = queue.pop(0)
            callees = FindNodes(Call).visit(self.efuncs[caller])
            for callee in filter_ordered([i.name for i in callees]):
                if callee in self.efuncs:  # Exclude foreign Calls, e.g., MPI calls
                    try:
                        dag.add_node(callee)
                        queue.append(callee)
                    except KeyError:
                        # `callee` already in `dag`; still record the new edge below
                        pass
                    dag.add_edge(callee, caller)
        # Sanity check
        assert dag.size == len(self.efuncs)
        return dag
def apply(self, func, **kwargs):
"""
Apply ``func`` to all nodes in the Graph. This changes the state of the Graph.
"""
dag = self._create_call_graph()
# Apply `func`
for i in dag.topological_sort():
self.efuncs[i], metadata = func(self.efuncs[i], **kwargs)
# Track any new Dimensions introduced by `func`
self.dimensions.extend(list(metadata.get('dimensions', [])))
# Track any new #include and #define required by `func`
self.includes.extend(list(metadata.get('includes', [])))
self.includes = filter_ordered(self.includes)
self.headers.extend(list(metadata.get('headers', [])))
self.headers = filter_ordered(self.headers, key=str)
# Tracky any new external function
self.ffuncs.extend(list(metadata.get('ffuncs', [])))
self.ffuncs = filter_ordered(self.ffuncs)
# Track any new ElementalFunctions
self.efuncs.update(OrderedDict([(i.name, i)
for i in metadata.get('efuncs', [])]))
# If there's a change to the `args` and the `iet` is an efunc, then
# we must update the call sites as well, as the arguments dropped down
# to the efunc have just increased
args = as_tuple(metadata.get('args'))
if not args:
continue
def filter_args(v, efunc=None):
processed = list(v)
for _a in args:
try:
# Should the arg actually be dropped?
a, drop = _a
if drop:
if a in processed:
processed.remove(a)
continue
except (TypeError, ValueError, IndexException):
a = _a
if a in processed:
# A child efunc trying to add a symbol alredy added by a
# sibling efunc
continue
if efunc is self.root and not (a.is_Input or a.is_Object):
# Temporaries (ie, Symbol, Arrays) *cannot* be args in `root`
continue
processed.append(a)
return processed
stack = [i] + dag.all_downstreams(i)
for n in stack:
efunc = self.efuncs[n]
mapper = {}
for c in FindNodes(Call).visit(efunc):
if c.name not in stack:
continue
mapper[c] = c._rebuild(arguments=filter_args(c.arguments))
parameters = filter_args(efunc.parameters, efunc)
efunc = Transformer(mapper).visit(efunc)
efunc = efunc._rebuild(parameters=parameters)
self.efuncs[n] = efunc
# Apply `func` to the external functions
for i in range(len(self.ffuncs)):
self.ffuncs[i], _ = func(self.ffuncs[i], **kwargs)
def iet_pass(func):
    """Decorator turning `func` into an IET pass applied over a Graph.

    Supports both plain functions (``pass(graph, **kwargs)``) and bound
    instance methods (``pass(self, graph, **kwargs)``).  When timed passes
    are enabled, ``graph.apply`` is wrapped in `timed_pass` for profiling.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Dispatch on arity *before* executing the pass.  The previous
        # implementation unpacked inside ``try: graph, = args ... except
        # ValueError`` wrapped around the pass invocation itself, so a
        # ValueError raised *by the pass* (or by graph.apply) was silently
        # misinterpreted as "instance method case" and re-dispatched.
        if len(args) == 1:
            graph, = args
            target = func
        else:
            self, graph = args
            target = partial(func, self)
        if timed_pass.is_enabled():
            apply_fn = timed_pass(graph.apply, func.__name__)
        else:
            apply_fn = graph.apply
        apply_fn(target, **kwargs)
    return wrapper
| mit | 2,058,282,354,897,736,700 | 34.341463 | 86 | 0.525017 | false | 4.264901 | false | false | false |
wavesoft/robob | robob/logger.py | 1 | 2082 |
import logging
(BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE) = list(range(8))
#These are the sequences need to get colored ouput
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
#: Color lookup table for formatter
COLORS = {
'WARNING': YELLOW,
'INFO': GREEN,
'DEBUG': BLUE,
'CRITICAL': YELLOW,
'ERROR': RED
}
#: Aliases for constant-sized names
ALIASES = {
'WARNING' : 'WARN',
'INFO' : 'INFO',
'DEBUG' : 'DEBG',
'CRITICAL': 'CRIT',
'ERROR' : 'FAIL'
}
def formatter_message(message, use_color=True):
	"""
	Expand the $RESET and $BOLD colour macros in `message`.

	When `use_color` is False the macros are stripped instead.
	"""
	reset = RESET_SEQ if use_color else ""
	bold = BOLD_SEQ if use_color else ""
	return message.replace("$RESET", reset).replace("$BOLD", bold)
class ColoredFormatter(logging.Formatter):
	"""
	Colored formatter

	Renders the record's level name in an ANSI colour (per COLORS) using
	its fixed-width alias (per ALIASES), and the logger name in white.
	Colouring can be disabled via `use_color`.
	"""
	def __init__(self, msg, use_color = True):
		logging.Formatter.__init__(self, msg)
		self.use_color = use_color
	def format(self, record):
		"""
		Format the specified log line
		"""
		levelname = record.levelname
		if self.use_color and levelname in COLORS:
			# Add color to level name
			levelname_color = COLOR_SEQ % (30 + COLORS[levelname]) + ALIASES[levelname] + RESET_SEQ
			record.levelname = levelname_color
		# Make name
		# NOTE(review): this mutates the record in place, so the coloured
		# name/level are also seen by any other handlers -- confirm intended.
		record.name = COLOR_SEQ % (30 + WHITE) + record.name + RESET_SEQ
		return logging.Formatter.format(self, record)
def init(logLevel=logging.INFO):
	"""
	Call this function to initialize global logger
	"""
	# Custom logger class with multiple destinations
	class RobobLogger(logging.Logger):
		FORMAT = "[$BOLD%(levelname)s$RESET][%(name)s] %(message)s"
		COLOR_FORMAT = formatter_message(FORMAT, True)
		def __init__(self, name):
			logging.Logger.__init__(self, name, logLevel)
			# Use colored formatter
			color_formatter = ColoredFormatter(self.COLOR_FORMAT)
			# Add console target
			# NOTE(review): every logger instantiated after init() gets its
			# own StreamHandler -- duplicate lines possible if init() is
			# called more than once; confirm single-call usage.
			console = logging.StreamHandler()
			console.setFormatter(color_formatter)
			self.addHandler(console)
	# Set robob logger
	logging.setLoggerClass(RobobLogger)
| apache-2.0 | -5,280,982,248,865,080,000 | 22.133333 | 90 | 0.675312 | false | 3.079882 | false | false | false |
bzzzz/cython | Cython/Utils.py | 1 | 6894 | #
# Cython -- Things that don't belong
# anywhere else in particular
#
import os, sys, re, codecs
def replace_suffix(path, newsuf):
    """Return *path* with its extension replaced by *newsuf*.

    *newsuf* must include the leading dot (e.g. ``'.c'``).
    """
    return os.path.splitext(path)[0] + newsuf
def open_new_file(path):
    """Open *path* for writing as a brand-new file.

    Any existing file at *path* is removed first, so previously created
    hard links to the old output are not clobbered in place.  The stream
    encodes text as ISO-8859-1: only pure-ASCII strings (or byte-encoded
    file names) are ever written, and ISO-8859-1 maps the first 256
    Unicode code points directly onto single bytes.
    """
    if os.path.exists(path):
        os.unlink(path)
    return codecs.open(path, "w", encoding="ISO-8859-1")
def castrate_file(path, st):
    """Replace *path* with a stub after a failed compilation.

    The stub contains a single ``#error`` directive so accidentally
    compiling the stale output fails loudly.  If *st* (a stat result) is
    given, access/modification times are rolled back to just before the
    ones in *st*, so build tools consider the output out of date.
    Failure to open the file is deliberately ignored (best effort).
    """
    try:
        f = open_new_file(path)
    except EnvironmentError:
        pass
    else:
        f.write(
            "#error Do not use this file, it is the result of a failed Cython compilation.\n")
        f.close()
        if st:
            # mtime-1 guarantees the stub looks older than the source
            os.utime(path, (st.st_atime, st.st_mtime-1))
def modification_time(path):
    """Return the mtime (seconds since the epoch) of *path*."""
    return os.stat(path).st_mtime


def file_newer_than(path, time):
    """Return True if *path* was modified strictly after *time*."""
    return modification_time(path) > time
def path_exists(path):
    """Return True if *path* exists on the filesystem or inside the zip
    archive this module was loaded from (PEP 302 loader support).
    """
    if os.path.exists(path):
        return True
    # Fall back to a PEP 302 loader, if one is active.
    try:
        loader = __loader__
        # XXX the code below assumes a 'zipimport.zipimporter' instance
        # XXX should be easy to generalize, but keeping the narrow behaviour
        if path.startswith(loader.archive):
            archive_name = os.path.normpath(path)[len(loader.archive) + 1:]
            try:
                loader.get_data(archive_name)
            except IOError:
                return False
            return True
    except NameError:
        # interpreter without a module __loader__
        pass
    return False
return False
# file name encodings
def decode_filename(filename):
    """Decode a byte-string file name to unicode, best effort.

    Already-unicode names are returned unchanged.  Byte strings are
    decoded with the filesystem encoding (falling back to the default
    encoding); undecodable names are returned as the original bytes.
    NOTE: Python 2 only -- relies on the ``unicode`` builtin.
    """
    if isinstance(filename, unicode):
        return filename
    try:
        filename_encoding = sys.getfilesystemencoding()
        if filename_encoding is None:
            filename_encoding = sys.getdefaultencoding()
        filename = filename.decode(filename_encoding)
    except UnicodeDecodeError:
        pass
    return filename
# support for source file encoding detection
_match_file_encoding = re.compile(u"coding[:=]\s*([-\w.]+)").search
def detect_file_encoding(source_filename):
    """Sniff the declared source encoding of *source_filename*.

    Implements the coding-declaration lookup of PEPs 263 and 3120: the
    first two lines are scanned for a ``coding[:=] <name>`` marker, read
    with a lenient UTF-8 decoder.  Defaults to "UTF-8" when no marker is
    found.
    """
    # PEPs 263 and 3120
    f = open_source_file(source_filename, encoding="UTF-8", error_handling='ignore')
    try:
        chars = []
        for i in range(2):
            c = f.read(1)
            # accumulate one line, character by character
            while c and c != u'\n':
                chars.append(c)
                c = f.read(1)
        encoding = _match_file_encoding(u''.join(chars))
        if encoding:
            return encoding.group(1)
    finally:
        f.close()
    return "UTF-8"
normalise_newlines = re.compile(u'\r\n?|\n').sub
class NormalisedNewlineStream(object):
    """The codecs module doesn't provide universal newline support.
    This class is used as a stream wrapper that provides this
    functionality. The new 'io' in Py2.6+/3.x supports this out of the
    box.
    """
    def __init__(self, stream):
        # let's assume .read() doesn't change
        self._read = stream.read
        self.close = stream.close
        self.encoding = getattr(stream, 'encoding', 'UTF-8')
    def read(self, count=-1):
        """Read up to *count* characters with \\r / \\r\\n collapsed to \\n."""
        data = self._read(count)
        if u'\r' not in data:
            return data
        if data.endswith(u'\r'):
            # may be missing a '\n': read one more char so a '\r\n' pair
            # split across the chunk boundary still collapses to one '\n'
            data += self._read(1)
        return normalise_newlines(u'\n', data)
    def readlines(self):
        # read the whole stream in 4 KiB chunks, then split on '\n'
        content = []
        data = self.read(0x1000)
        while data:
            content.append(data)
            data = self.read(0x1000)
        return u''.join(content).split(u'\n')
io = None
if sys.version_info >= (2,6):
try:
import io
except ImportError:
pass
def open_source_file(source_filename, mode="r",
                     encoding=None, error_handling=None,
                     require_normalised_newlines=True):
    """Open a source file with the right encoding and universal newlines.

    Transparently supports files living inside a zip archive via a PEP
    302 loader.  When *encoding* is None it is sniffed from the file's
    coding declaration (detect_file_encoding).
    """
    if encoding is None:
        encoding = detect_file_encoding(source_filename)
    #
    try:
        loader = __loader__
        if source_filename.startswith(loader.archive):
            return open_source_from_loader(
                loader, source_filename,
                encoding, error_handling,
                require_normalised_newlines)
    except (NameError, AttributeError):
        pass
    #
    if io is not None:
        # io.open provides universal newline handling natively (Py2.6+)
        return io.open(source_filename, mode=mode,
                       encoding=encoding, errors=error_handling)
    else:
        # codecs module doesn't have universal newline support
        stream = codecs.open(source_filename, mode=mode,
                             encoding=encoding, errors=error_handling)
        if require_normalised_newlines:
            stream = NormalisedNewlineStream(stream)
        return stream
def open_source_from_loader(loader,
                            source_filename,
                            encoding=None, error_handling=None,
                            require_normalised_newlines=True):
    """Open a source file through a PEP 302 (zipimport) loader.

    Decodes the archived bytes with *encoding*; when the io module is
    unavailable, falls back to codecs plus newline normalisation.
    """
    nrmpath = os.path.normpath(source_filename)
    arcname = nrmpath[len(loader.archive)+1:]  # path relative to the archive root
    data = loader.get_data(arcname)
    if io is not None:
        return io.TextIOWrapper(io.BytesIO(data),
                                encoding=encoding,
                                errors=error_handling)
    else:
        try:
            import cStringIO as StringIO
        except ImportError:
            import StringIO
        reader = codecs.getreader(encoding)
        stream = reader(StringIO.StringIO(data))
        if require_normalised_newlines:
            stream = NormalisedNewlineStream(stream)
        return stream
def str_to_number(value):
    """Convert a numeric literal already accepted by the parser to an int.

    Understands Py3-style prefixes (0x/0o/0b) as well as legacy Python 2
    octal literals written as '0NNN'.
    """
    if len(value) < 2 or value[0] != '0':
        return int(value, 0)
    prefix = value[1]
    if prefix in 'xX':
        return int(value[2:], 16)  # hex notation ('0x1AF')
    if prefix in 'oO':
        return int(value[2:], 8)   # Py3 octal notation ('0o136')
    if prefix in 'bB':
        return int(value[2:], 2)   # Py3 binary notation ('0b101')
    return int(value, 8)           # Py2 octal notation ('0136')
def long_literal(value):
    """Return True if *value* (an int or a numeric literal string) does
    not fit in a signed 32-bit C int and therefore needs a long type.
    NOTE: Python 2 only -- uses the ``basestring`` builtin.
    """
    if isinstance(value, basestring):
        value = str_to_number(value)
    return not -2**31 <= value < 2**31
def none_or_sub(s, data):
    """Apply %-interpolation of *data* into *s*, passing None through."""
    return None if s is None else s % data
| apache-2.0 | 4,077,520,384,334,594,000 | 29.914798 | 94 | 0.588628 | false | 3.959793 | false | false | false |
GNOME/chronojump-server | chronojumpserver-django/config/settings/local.py | 1 | 2220 | from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', default=True)
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env('DJANGO_SECRET_KEY', default='frkS4wP89VDVwgwMutBoAnajychwzhxvcRYLCrtBbUjdVsas4lLba8tq9hUFfREU')
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = [
"localhost",
"0.0.0.0",
"127.0.0.1",
"networks.chronojump.org",
]
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa F405
# django-debug-toolbar
# ------------------------------------------------------------------------------
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
INSTALLED_APPS += ['debug_toolbar'] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware'] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2']
import socket
import os
if os.environ.get('USE_DOCKER') == 'yes':
hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
INTERNAL_IPS += [ip[:-1] + '1' for ip in ips]
# django-extensions
# ------------------------------------------------------------------------------
# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
INSTALLED_APPS += ['django_extensions'] # noqa F405
| agpl-3.0 | -4,395,200,191,714,006,500 | 37.275862 | 113 | 0.584685 | false | 3.639344 | false | false | false |
emillynge/python-remoteexecution | remoteexecution/Environments.py | 1 | 33256 | from __future__ import (absolute_import, print_function, unicode_literals, division)
__author__ = 'emil'
import abc
import pxssh
import Pyro4
import os
from copy import deepcopy
import json
from subprocess import (Popen, PIPE)
from jsoncodecs import (build_codec, HANDLERS)
import re
from time import sleep
from collections import (namedtuple, defaultdict)
from functools import partial
FNULL = open(os.devnull, 'w')
def set_default():
    """Install the default all-on-one-machine configuration.

    Convenience for local testing: client, manager and executor all run
    on this machine, with the manager serving on port 5000 from the
    current directory.
    """
    EnvironmentFactory.set_environments(communication=CommAllOnSameMachine, execution=ExecTest,
                                        serializer=SerializingEnvironment)
    EnvironmentFactory.set_settings(manager_work_dir=os.path.abspath('.'), manager_port=5000,
                                    manager_interpreter='python2', manager_target='remote-exec-cli',
                                    pyro_serializer='serpent')
def serializing_environment():
    """Return the configured serializer environment (type-checked)."""
    env_obj = EnvironmentFactory.get_environment('serializer')
    assert isinstance(env_obj, SerializingEnvironment)
    return env_obj
def communication_environment():
    """Return the configured communication environment (type-checked)."""
    env_obj = EnvironmentFactory.get_environment('communication')
    assert isinstance(env_obj, (CommunicationEnvironment, DTUHPCCommunication, CommAllOnSameMachine,
                                Manager2ExecutorThroughSSH, Client2ManagerThroughSSH))
    return env_obj
def execution_environment():
    """Return the configured execution environment (type-checked)."""
    env_obj = EnvironmentFactory.get_environment('execution')
    assert isinstance(env_obj, ExecutionEnvironment)
    return env_obj
# noinspection PyDefaultArgument,PyDefaultArgument
class EnvironmentFactory(object):
    """Process-wide registry of Environment objects and their settings.

    Effectively a singleton: ``_get_set_settings``, ``_get_set_environments``
    and ``_set_get_factory`` all use *mutable default arguments on purpose*
    so that the same dict/list is shared by every call, across classmethods
    and instances alike (hence the PyDefaultArgument suppression above).
    """
    def __init__(self):
        self._settings = self._get_set_settings()
        self._environments = self._get_set_environments()
        # register this instance as *the* factory
        self._set_get_factory(new_factory=self)
    @classmethod
    def set_from_env_info(cls, env_info_string):
        """Restore factory state from the JSON produced by __repr__()."""
        env_info = json.loads(env_info_string)
        for key, val in env_info.iteritems():
            if key == 'settings':
                cls.set_settings(**val)
            else:
                cls.set_environments(**{key: val})
    @classmethod
    def set_settings(cls, **factory_settings):
        cls._get_set_settings(**factory_settings)
    @staticmethod
    def _get_set_settings(_settings=dict(), **factory_settings):
        # _settings is a shared mutable default (singleton storage)
        if 'pyro_serializer' in factory_settings:
            # configure Pyro globally as a side effect
            Pyro4.config.SERIALIZER = factory_settings['pyro_serializer']
            Pyro4.config.SERIALIZERS_ACCEPTED.add(factory_settings['pyro_serializer'])
        if factory_settings:
            _settings.update(factory_settings)
        return _settings
    @classmethod
    def set_environments(cls, **environments):
        cls._get_set_environments(**environments)
    @classmethod
    def _get_set_environments(cls, _environments=dict(), **environments):
        # _environments is a shared mutable default (singleton storage)
        for env_name, env_cls in environments.iteritems():
            env_obj = cls.load_environment_obj(env_name, env_cls)
            _environments[env_name] = env_obj
        return _environments
    @staticmethod
    def _set_get_factory(_factory=list(), new_factory=None):
        # _factory is a shared mutable default; only _factory[0] -- the
        # first factory ever registered -- is handed out.
        if new_factory is not None:
            _factory.append(new_factory)
        elif not _factory:
            EnvironmentFactory()
        return _factory[0]
    @staticmethod
    def load_environment_obj(env_name, cls):
        """Resolve *cls* ('module:Class' string, {module: Class} dict or an
        Environment subclass) and return an instance of it."""
        if isinstance(cls, (str, unicode)):
            return EnvironmentFactory.load_environment_obj(env_name, import_obj_from(*tuple(cls.split(':'))))
        elif isinstance(cls, dict):
            return EnvironmentFactory.load_environment_obj(env_name, import_obj_from(*cls.popitem()))
        elif issubclass(cls, Environment):
            return cls()
        raise InvalidUserInput('Trying to set invalid environment', argname=env_name,
                               expected='Subclass of Environment or string', found=cls)
    @classmethod
    def get_environment(cls, environment_name):
        """Return the named Environment, refreshed with current settings."""
        factory = cls._set_get_factory()
        if not factory:
            factory = cls()
        if environment_name not in factory._environments:
            raise InvalidUserInput('Requested environment has not been set', argname='environment_name',
                                   expected=factory._environments, found=environment_name)
        env_obj = factory._environments[environment_name]
        assert issubclass(env_obj.__class__, Environment)
        env_obj.set_settings(**factory._settings)
        return env_obj
    def __repr__(self):
        # JSON with module/class of each environment plus the settings,
        # whitespace stripped so it travels cleanly on a command line
        env_info = dict()
        for env_name, env_obj in self._environments.iteritems():
            env_info[env_name] = {env_obj.__class__.__module__: env_obj.__class__.__name__}
        env_info['settings'] = self._settings
        env_info_string = json.dumps(env_info)
        return env_info_string.replace(' ', '')
    @classmethod
    def cls_repr(cls):
        factory = cls._set_get_factory()
        return factory.__repr__()
from .Utils import (WrappedProxy, import_obj_from, InvalidUserInput, DummyLogger, TunnelForwarder, RemoteCommandline,
SSHPrompt, get_external_ip, BashPrompt, SSHPopen)
from .ClientSide import (BaseScriptGenerator, SimpleScriptGenerator, HPCScriptGenerator)
class Environment(object):
    """Abstract base for all environment objects handed out by the factory."""
    __metaclass__ = abc.ABCMeta
    def __init__(self):
        self.settings = dict()
        self.logger = DummyLogger()
    def set_attribute_if_in_settings(self, *attributes, **settings):
        """Pop each named key out of *settings* onto self; return the rest."""
        _settings = deepcopy(settings)
        for attr in attributes:
            if attr in _settings:
                setattr(self, attr, _settings.pop(attr))
        return _settings
    def set_settings(self, **settings):
        # final sink of the cooperative set_settings chain
        self.settings = settings
    def is_attrs_set(self, **attr_expect_pairs):
        """Validate several attribute/type pairs at once."""
        for attr_name, expect_type in attr_expect_pairs.iteritems():
            self.is_attr_set(attr_name, expect_type)
    def is_attr_set(self, attr_name, expected_type):
        """Raise InvalidUserInput unless self.<attr_name> has the expected type."""
        attr = getattr(self, attr_name)
        InvalidUserInput.isinstance(attr_name, expected_type, attr, message='missing environment settings', indent=4,
                                    declaration_frame='calling')
    def __repr__(self):
        # {"module": "ClassName"} -- the form EnvironmentFactory can reload
        return '{"' + self.__class__.__module__ + '":"' + self.__class__.__name__ + '"}'
class BashMixin(object):
    """Mixin providing local (non-SSH) shell prompts and RemoteCommandline
    objects for configurations where a role runs on this machine."""
    @staticmethod
    def bash_prompt(work_dir):
        """Spawn a local shell prompt and cd into *work_dir*."""
        s = BashPrompt()
        s.sendline('cd {0}'.format(work_dir))
        s.prompt()
        return s
    def spoof_manager_remote_cli(self):
        """RemoteCommandline that drives the manager CLI through a local shell."""
        ex_env = execution_environment()
        def ssh_instance_generator():
            return self.bash_prompt(ex_env.manager_work_dir)
        cli = RemoteCommandline(ssh_instance_generator, ex_env.manager_interpreter, ex_env.manager_target)
        return cli
    def spoof_executor_remote_cli(self):
        """RemoteCommandline that drives the executor CLI through a local shell."""
        ex_env = execution_environment()
        def ssh_instance_generator():
            # BUG FIX: the prompt was created but not returned, so the
            # RemoteCommandline received None instead of a shell instance.
            return self.bash_prompt(ex_env.executor_work_dir)
        cli = RemoteCommandline(ssh_instance_generator, ex_env.executor_interpreter, ex_env.executor_target)
        return cli
class SSHMixin(object):
    """Mixin providing SSH tunnels and SSH shell prompts.

    One TunnelForwarder per remote connection is cached so later tunnel
    requests to the same host reuse the established SSH transport.
    """
    def __init__(self):
        self.tunnel_managers = dict()  # "host" or "host@user" -> TunnelForwarder
        super(SSHMixin, self).__init__()
    def make_tunnel(self, ssh_settings, local_bind_address=None, remote_bind_address=None):
        """
        make a ssh tunnel from local_bind_address to remote_bind_address on a remote host. login with ssh_settings
        :param local_bind_address: tuple with (host, port) if None a free port is found on localhost
        :param remote_bind_address: tuple with (host, port)
        :param ssh_settings: dict with the settings:
                ssh_address_or_host
                ssh_port
                ssh_host_key
                ssh_username
                ssh_password
                ssh_private_key
        :return: local address bindings (local_host, local_port)
        """
        assert isinstance(ssh_settings, dict)
        # cache key identifying this host/user combination
        connection = str(ssh_settings['ssh_address_or_host'])
        if 'ssh_username' in ssh_settings:
            connection += '@' + ssh_settings['ssh_username']
        if connection in self.tunnel_managers:
            # reuse the live transport; just add another forwarding on the fly
            return self.tunnel_managers[connection].on_the_fly_tunnel(local_bind_address=local_bind_address, remote_bind_address=remote_bind_address)
        server = TunnelForwarder(
            local_bind_address=local_bind_address,
            remote_bind_address=remote_bind_address,
            logger=DummyLogger(),
            raise_exception_if_any_forwarder_have_a_problem=False,
            **ssh_settings)
        server.start()
        self.tunnel_managers[connection] = server
        return server.local_bind_host, server.local_bind_port
    @staticmethod
    def ssh_prompt(ssh_settings, work_dir):
        """Login via pxssh using *ssh_settings* and cd into *work_dir*."""
        s = SSHPrompt()
        if not s.login(**ssh_settings):
            raise pxssh.ExceptionPxssh('Login failed')
        s.sendline('cd {0}'.format(work_dir))
        s.prompt()
        return s
class _CommunicationRequired(Environment):
    """Abstract interface every communication environment must implement.

    All tunnel methods return the *local* (host, port) binding of the
    forwarded endpoint.
    """
    __metaclass__ = abc.ABCMeta
    # Remote commandlines always needed to get from client to manager side.
    # in some cases it may be necessary to get to the executor side and from executor to manager
    # noinspection PyArgumentList,PyArgumentList
    @abc.abstractmethod
    def make_remote_cli_client2manager(self):
        return RemoteCommandline(None)
    # optional
    # noinspection PyArgumentList,PyArgumentList
    @abc.abstractmethod
    def make_remote_cli_manager2executor(self):
        return RemoteCommandline(None)
    # optional
    # noinspection PyArgumentList,PyArgumentList
    @abc.abstractmethod
    def make_remote_cli_executor2manager(self):
        return RemoteCommandline(None)
    # Tunnels are needed from client and executor to manager to obtain manager_proxy.
    # Manager tunnels should only take optional address arguments.
    # Furthermore a proxy is needed from client and manager to the executor, these should take address arguments
    # Optionally proxies from manager and executor can be implemented with address arguments.
    # All tunnels return the *local* bindings for host and port
    # *Manager tunnels*
    @abc.abstractmethod
    def client2manager_tunnel(self, manager_host=None, manager_port=None):
        host = str()
        port = int()
        return host, port
    @abc.abstractmethod
    def executor2manager_tunnel(self, manager_host=None, manager_port=None):
        host = str()
        port = int()
        return host, port
    # *Executor tunnels*
    @abc.abstractmethod
    def manager2executor_tunnel(self, executor_host, executor_port):
        host = str()
        port = int()
        return host, port
    # *Client tunnels* (optional)
    @abc.abstractmethod
    def manager2client_tunnel(self, client_host, client_port):
        host = str()
        port = int()
        return host, port
class _CommunicationOptionals(object):
    """Default 'not available' stubs for the optional communication hooks;
    concrete configurations override only the ones they support."""
    @property
    def executor_popen(self):
        raise NotImplementedError('Method is marked as non-available for this configuration')
    def manager2client_tunnel(self, client_host, client_port):
        raise NotImplementedError('Method is marked as non-available for this configuration')
    def make_remote_cli_manager2executor(self):
        raise NotImplementedError('Method is marked as non-available for this configuration')
    def make_remote_cli_executor2manager(self):
        raise NotImplementedError('Method is marked as non-available for this configuration')
class CommunicationEnvironment(_CommunicationOptionals, _CommunicationRequired, BashMixin):
    """Concrete base for communication environments.

    Tracks which side of the system we are on (``my_location`` is one of
    'client', 'manager' or 'executor') and lazily builds the remote
    command lines, SSH tunnels and Pyro proxies used to reach the manager
    from that side.
    """
    def __init__(self):
        # NOTE(review): super(Environment, ...) deliberately(?) skips
        # Environment.__init__ in the MRO -- confirm, since self.settings
        # is consequently never initialised here.
        super(Environment, self).__init__()
        self.my_location = None          # 'client' / 'manager' / 'executor'
        self._my_ip = None               # cached external IP of this host
        self.manager_port = None
        self._manager_ip = None
        self._manager_proxy = None       # cached WrappedProxy to the manager
        self._manager_side_cli = None    # cached RemoteCommandline to the manager
        self._client_side_cli = None
        self._executor_side_cli = None
        self.logger = DummyLogger()
    def set_settings(self, manager_port=None, manager_ip=None,
                     manager_work_dir=None, client_work_dir=None, executor_work_dir=None,
                     **settings):
        # NOTE(review): the *_work_dir arguments are accepted but neither
        # stored nor forwarded -- they appear to be consumed here only to
        # keep them out of **settings; confirm.
        self.manager_port = manager_port or self.manager_port
        self._manager_ip = manager_ip or self._manager_ip
        self.is_attr_set('manager_port', int)
        super(CommunicationEnvironment, self).set_settings(**settings)
    @property
    def manager_side_cli(self):
        """Remote command line to the manager, built according to my_location."""
        if self._manager_side_cli:
            return self._manager_side_cli
        if self.my_location == 'client':
            self._manager_side_cli = self.make_remote_cli_client2manager()
            return self._manager_side_cli
        if self.my_location == 'executor':
            self._manager_side_cli = self.make_remote_cli_executor2manager()
            return self._manager_side_cli
        if self.my_location == 'manager':
            self._manager_side_cli = self.spoof_manager_remote_cli()
            # BUG FIX: this branch previously fell through to the raise
            # below, so requesting the cli *on* the manager always failed.
            return self._manager_side_cli
        raise Exception('Cannot request a manager command line when my_location is unknown')
    @property
    def client2manager_side_cli(self):
        self.my_location = 'client'
        return self.manager_side_cli
    @property
    def executor2manager_side_cli(self):
        self.my_location = 'executor'
        return self.manager_side_cli
    @property
    def my_ip(self):
        # resolved once via an external lookup, then cached
        if not self._my_ip:
            self._my_ip = get_external_ip()
        return self._my_ip
    @property
    def manager_ip(self):
        """IP of the manager host; resolved once (remotely if necessary),
        cached and pushed back into the factory settings."""
        if self._manager_ip:
            return self._manager_ip
        if self.my_location != 'manager':
            # ask the manager-side cli to report its own ip
            self.manager_side_cli('-i isup manager')
            ip = self.manager_side_cli.get('ip')[0]
        else:
            ip = self.my_ip
        self._manager_ip = ip
        EnvironmentFactory.set_settings(manager_ip=ip)
        return ip
    @property
    def manager_host(self):
        """ default is that the manager is registered on the external ip
        :return: the hostname used to connect to manager.
        """
        return self.manager_ip
    # This one is inferred and should not be overridden
    def client2executor_tunnel(self, executor_host, executor_port):
        """Chain client->manager and manager->executor tunnels."""
        manager_host_binding, manager_port_binding = self.client2manager_proxy.env_call('communication',
                                                                                        'manager2executor_tunnel',
                                                                                        executor_host,
                                                                                        executor_port)
        host, port = self.client2manager_tunnel(manager_host=manager_host_binding, manager_port=manager_port_binding)
        return host, port
    # This one is inferred and should not be overridden
    def executor2client_tunnel(self, client_host, client_port):
        """Chain executor->manager and manager->client tunnels."""
        manager_host_binding, manager_port_binding = self.executor2manager_proxy.env_call('communication',
                                                                                          'manager2client_tunnel',
                                                                                          client_host,
                                                                                          client_port)
        host, port = self.executor2manager_tunnel(manager_host=manager_host_binding, manager_port=manager_port_binding)
        return host, port
    # *Manager proxies*
    @property
    def client2manager_proxy(self, manager_host=None, manager_port=None):
        # NOTE(review): property getters cannot receive arguments, so
        # manager_host/manager_port are always None here -- confirm intent.
        if not self._manager_proxy:
            local_host, local_port = self.client2manager_tunnel(manager_host=manager_host, manager_port=manager_port)
            self._manager_proxy = WrappedProxy('remote_execution.manager@{0}:{1}'.format(local_host, local_port))
        return self._manager_proxy
    @property
    def executor2manager_proxy(self, manager_host=None, manager_port=None):
        # NOTE(review): see client2manager_proxy -- the parameters are dead.
        if not self._manager_proxy:
            local_host, local_port = self.executor2manager_tunnel(manager_host=manager_host, manager_port=manager_port)
            self._manager_proxy = WrappedProxy('remote_execution.manager@{0}:{1}'.format(local_host, local_port))
        return self._manager_proxy
class Client2ManagerThroughSSH(SSHMixin, CommunicationEnvironment):
    """Communication mixin: the client reaches the manager over SSH."""
    def __init__(self):
        self.client2manager_ssh_settings = None
        super(Client2ManagerThroughSSH, self).__init__()
    def set_settings(self, ssh_client2manager=None, **settings):
        # ssh_client2manager: dict of pxssh/tunnel login settings
        self.client2manager_ssh_settings = ssh_client2manager
        self.is_attr_set('client2manager_ssh_settings', (dict,))
        super(Client2ManagerThroughSSH, self).set_settings(**settings)
    def client2manager_tunnel(self, manager_host=None, manager_port=None):
        """SSH tunnel from the client to the manager; returns the local binding."""
        self.my_location = 'client'
        manager_host = manager_host or self.manager_host
        manager_port = manager_port or self.manager_port
        return self.make_tunnel(self.client2manager_ssh_settings, remote_bind_address=(manager_host, manager_port))
    def make_remote_cli_client2manager(self):
        """RemoteCommandline that runs the manager-side CLI over SSH."""
        ex_env = execution_environment()
        def ssh_instance_generator():
            return self.ssh_prompt(self.client2manager_ssh_settings, ex_env.manager_work_dir)
        cli = RemoteCommandline(ssh_instance_generator, ex_env.manager_interpreter, ex_env.manager_target)
        return cli
class Executor2ManagerThroughSSH(SSHMixin, CommunicationEnvironment):
    """Communication mixin: the executor reaches the manager over SSH."""
    def __init__(self):
        self.executor2manager_ssh_settings = None
        super(Executor2ManagerThroughSSH, self).__init__()
    def set_settings(self, ssh_executor2manager=None, **settings):
        # ssh_executor2manager: dict of pxssh/tunnel login settings
        self.executor2manager_ssh_settings = ssh_executor2manager
        # BUG FIX: previously validated 'client2manager_ssh_settings', an
        # attribute belonging to a different mixin (it only passed by
        # accident in combined configurations such as TethysCommunication).
        self.is_attr_set('executor2manager_ssh_settings', dict)
        super(Executor2ManagerThroughSSH, self).set_settings(**settings)
    def executor2manager_tunnel(self, manager_host=None, manager_port=None):
        """SSH tunnel from the executor to the manager; returns the local binding."""
        # BUG FIX: my_location was set to 'client' (copy/paste from the
        # client mixin); this tunnel is created on the executor side.
        self.my_location = 'executor'
        manager_host = manager_host or self.manager_host
        manager_port = manager_port or self.manager_port
        return self.make_tunnel(self.executor2manager_ssh_settings, remote_bind_address=(manager_host, manager_port))
    def make_remote_cli_executor2manager(self):
        """RemoteCommandline that runs the manager-side CLI over SSH."""
        ex_env = execution_environment()
        def ssh_instance_generator():
            return self.ssh_prompt(self.executor2manager_ssh_settings, ex_env.manager_work_dir)
        cli = RemoteCommandline(ssh_instance_generator, ex_env.manager_interpreter, ex_env.manager_target)
        return cli
class Manager2ExecutorThroughSSH(SSHMixin, CommunicationEnvironment):
    """Communication mixin: the manager reaches the executor over SSH."""
    def __init__(self):
        self.manager2executor_ssh_settings = None
        self.executor_popen_ssh = list()  # pool of reusable SSH prompts for executor_popen
        super(Manager2ExecutorThroughSSH, self).__init__()
    def set_settings(self, ssh_manager2executor=None, **settings):
        # ssh_manager2executor: dict of pxssh/tunnel login settings
        self.manager2executor_ssh_settings = ssh_manager2executor
        self.is_attr_set('manager2executor_ssh_settings', dict)
        super(Manager2ExecutorThroughSSH, self).set_settings(**settings)
    def manager2executor_tunnel(self, executor_host, executor_port):
        """SSH tunnel from the manager to the executor; returns the local binding."""
        self.my_location = 'manager'
        return self.make_tunnel(self.manager2executor_ssh_settings, remote_bind_address=(executor_host, executor_port))
    def make_remote_cli_manager2executor(self):
        """RemoteCommandline that runs the executor-side CLI over SSH."""
        ex_env = execution_environment()
        def ssh_instance_generator():
            return self.ssh_prompt(self.manager2executor_ssh_settings, ex_env.executor_work_dir)
        cli = RemoteCommandline(ssh_instance_generator, ex_env.executor_interpreter, ex_env.executor_target)
        return cli
    def ssh_instance_manager2executor(self):
        """Fresh SSH prompt opened in the executor's work dir."""
        ex_env = execution_environment()
        return self.ssh_prompt(self.manager2executor_ssh_settings, ex_env.executor_work_dir)
    def executor_popen(self, *args, **kwargs):
        """Popen-alike that runs a command on the executor over SSH,
        reusing an idle prompt from the pool when one is available."""
        for ssh_prompt in self.executor_popen_ssh:
            if not ssh_prompt.is_locked():
                break
        else:  # executed if no break occurred, i.e no unlocked prompts found
            ssh_prompt = self.ssh_instance_manager2executor()
            self.executor_popen_ssh.append(ssh_prompt)
        ex_env = execution_environment()
        return SSHPopen(*args, work_dir=ex_env.executor_work_dir, ssh_prompt=ssh_prompt, logger=self.logger, **kwargs)
class ManagerAndExecutorOnLAN(CommunicationEnvironment):
    """Manager and executor share a LAN: addresses are used directly,
    no SSH tunnels are required between them."""
    def manager2executor_tunnel(self, executor_host, executor_port):
        # same network -- connect directly, nothing to forward
        return executor_host, executor_port
    def executor2manager_tunnel(self, manager_host=None, manager_port=None):
        if manager_host is None:
            # was: ``if manager_host is None and self`` -- the ``and self``
            # term is always true for a bound instance and has been dropped.
            raise NotImplementedError('Cannot determine manager_host automatically over LAN')
        manager_port = manager_port or self.manager_port
        return manager_host, manager_port
class DTUHPCCommunication(Client2ManagerThroughSSH, ManagerAndExecutorOnLAN):
    """DTU HPC setup: the client SSHes to the manager; manager and
    executor share the cluster LAN."""
    pass
class TethysCommunication(Client2ManagerThroughSSH, Manager2ExecutorThroughSSH, Executor2ManagerThroughSSH):
    """Tethys setup: every pair of roles communicates over SSH."""
    pass
class CommManagerAndExecutorOnSameMachine(CommunicationEnvironment):
    """Manager and executor share one machine: plain Popen, no tunnels."""
    @property
    def executor_popen(self):
        # executor is local, so ordinary subprocess.Popen works
        return Popen
    def manager2executor_tunnel(self, executor_host, executor_port):
        # same machine -- the address is already reachable
        return executor_host, executor_port
    def make_remote_cli_executor2manager(self):
        # the manager cli can be reached through a local shell
        return self.spoof_manager_remote_cli()
    def executor2manager_tunnel(self, manager_host=None, manager_port=None):
        manager_host = manager_host or self.manager_host
        manager_port = manager_port or self.manager_port
        return manager_host, manager_port
class CommClientAndManagerOnSameMachine(CommunicationEnvironment):
    """Client and manager share one machine: no tunnels between them."""
    def manager2client_tunnel(self, client_host, client_port):
        return client_host, client_port
    def client2manager_tunnel(self, manager_host=None, manager_port=None):
        manager_host = manager_host or self.manager_host
        manager_port = manager_port or self.manager_port
        return manager_host, manager_port
    def make_remote_cli_client2manager(self):
        # the manager cli can be reached through a local shell
        return self.spoof_manager_remote_cli()
    def manager2executor_tunnel(self, executor_host, executor_port):
        return executor_host, executor_port
class CommAllOnSameMachine(CommManagerAndExecutorOnSameMachine, CommClientAndManagerOnSameMachine):
    """Everything runs on this machine -- use loopback everywhere."""
    @property
    def manager_host(self):
        return 'localhost'
    @property
    def my_ip(self):
        return 'localhost'
class ExecutionEnvironment(Environment):
    """Abstract base describing where and how the three roles execute:
    interpreter, CLI target and working directory for client, manager and
    executor, plus job submit/stat/delete primitives for the executor."""
    def __init__(self):
        self.client_interpreter = 'python'
        self.client_target = 'remote-exec-cli'
        self.client_work_dir = os.path.curdir
        self.manager_interpreter = 'python'
        self.manager_target = 'remote-exec-cli'
        self.manager_work_dir = None
        self.executor_interpreter = 'python'
        self.executor_target = 'remote-exec-cli'
        self.executor_work_dir = None
        # (stdout, stderr) pair used by job_del implementations
        self.output_cls = namedtuple('Output', ['stdout', 'stderr'])
        super(ExecutionEnvironment, self).__init__()
    def set_settings(self, **settings):
        _settings = self.set_attribute_if_in_settings('client_interpreter',
                                                      'client_target',
                                                      'client_work_dir',
                                                      'manager_interpreter',
                                                      'manager_target',
                                                      'manager_work_dir',
                                                      'executor_interpreter',
                                                      'executor_target',
                                                      'executor_work_dir', **settings)
        self.is_attrs_set(client_interpreter=(str, unicode),
                          client_target=(str, unicode),
                          client_work_dir=(str, unicode),
                          manager_interpreter=(str, unicode),
                          manager_target=(str, unicode),
                          manager_work_dir=(str, unicode),
                          executor_interpreter=(str, unicode),
                          executor_target=(str, unicode),
                          executor_work_dir=(str, unicode))
        super(ExecutionEnvironment, self).set_settings(**_settings)
    @property
    def client_command_line_prefix(self):
        # NOTE(review): the format string only uses {1} and {2}, so the
        # interpreter ({0}) is passed but never appears in the command
        # line -- confirm whether intentional (target may be self-executing).
        return '{1} -E \'{2}\''.format(self.client_interpreter, self.client_target, EnvironmentFactory.cls_repr())
    @property
    def manager_command_line_prefix(self):
        # NOTE(review): same dropped-{0} pattern as client_command_line_prefix.
        return '{1} -E \'{2}\''.format(self.manager_interpreter, self.manager_target, EnvironmentFactory.cls_repr())
    @property
    def executor_command_line_prefix(self):
        # NOTE(review): same dropped-{0} pattern as client_command_line_prefix.
        return '{1} -E \'{2}\''.format(self.executor_interpreter, self.executor_target, EnvironmentFactory.cls_repr())
    @abc.abstractmethod
    def job_start(self, execution_script_location):
        """Submit/launch the script; return an opaque job id string."""
        job_id = str()
        return job_id
    @abc.abstractmethod
    def job_stat(self, job_id):
        """Return scheduler state information for *job_id*."""
        state = dict()
        return state
    @abc.abstractmethod
    def job_del(self, job_id):
        """Delete/kill the job; return an output_cls-like result."""
        output = self.output_cls()
        return output
    # noinspection PyArgumentList
    @abc.abstractproperty
    def script_generator(self):
        """Script generator appropriate for this execution back end."""
        return BaseScriptGenerator()
class ExecManagerAndExecutorOnSameMachine(ExecutionEnvironment):
    """Executor shares interpreter/target/work dir with the manager."""
    def set_settings(self, **settings):
        _settings = self.set_attribute_if_in_settings('manager_work_dir', 'manager_interpreter', 'manager_target',
                                                      **settings)
        # mirror the manager configuration onto the executor
        self.executor_work_dir = self.manager_work_dir
        self.executor_interpreter = self.manager_interpreter
        self.executor_target = self.manager_target
        super(ExecManagerAndExecutorOnSameMachine, self).set_settings(**_settings)
class ExecClientAndManagerOnSameMachine(ExecutionEnvironment):
    """Client shares interpreter/target/work dir with the manager."""
    def set_settings(self, **settings):
        _settings = self.set_attribute_if_in_settings('manager_work_dir', 'manager_interpreter', 'manager_target',
                                                      **settings)
        # mirror the manager configuration onto the client
        self.client_work_dir = self.manager_work_dir
        self.client_interpreter = self.manager_interpreter
        self.client_target = self.manager_target
        super(ExecClientAndManagerOnSameMachine, self).set_settings(**_settings)
class ExecAllOnSameMachine(ExecManagerAndExecutorOnSameMachine, ExecClientAndManagerOnSameMachine):
    """All roles on one machine; the generic work_dir/interpreter/target
    settings are treated as the manager's and mirrored to every role."""
    def set_settings(self, work_dir=None, interpreter=None, target=None, **settings):
        _settings = self.set_attribute_if_in_settings('manager_work_dir', 'manager_target', 'manager_interpreter',
                                                      **settings)
        # generic settings override/feed the manager values
        self.manager_work_dir = work_dir or self.manager_work_dir
        self.manager_target = target or self.manager_target
        self.manager_interpreter = interpreter or self.manager_interpreter
        super(ExecAllOnSameMachine, self).set_settings(**_settings)
class PopenExecution(ExecutionEnvironment):
    """Run jobs as plain OS processes via (a possibly SSH-wrapped) Popen."""
    def job_start(self, execution_script_location):
        """Launch the single command found in the script; return its pid."""
        comm_env = communication_environment()
        comm_env.logger = self.logger
        _POpen = comm_env.executor_popen
        with open(execution_script_location) as fp:
            commands = fp.readline().split(' ')
        self.logger.debug(commands)
        # NOTE(review): stderr=True/stdout=False are not valid arguments for
        # subprocess.Popen; this presumably targets the SSHPopen wrapper --
        # confirm behaviour in the plain-Popen configurations.
        p = _POpen(commands, stderr=True, stdout=False)
        err_lines = p.stderr.read()
        if err_lines:
            self.logger.warning(err_lines)
        job_id = p.pid
        #p1 = _POpen(['sh', execution_script_location])
        #p2 = _POpen(['ps', '--ppid', str(p1.pid)], stdout=PIPE)
        #p2.stdout.readline()
        #line = p2.stdout.readline()
        #job_id = re.findall('\d+', line)[0]
        return job_id
    def job_stat(self, job_id):
        """Return (state, cpu_time) for *job_id* using ``ps -p``:
        'R' while the process is alive, 'C' when finished or defunct."""
        comm_env = communication_environment()
        comm_env.logger = self.logger
        _POpen = comm_env.executor_popen
        commands = ['ps', '-p', job_id]
        self.logger.debug(commands)
        p_stat = _POpen(commands, stdout=PIPE, stderr=PIPE)
        # NOTE(review): wait(timeout=...) needs Python 3 or the SSHPopen
        # wrapper; Python 2 subprocess.Popen.wait takes no timeout -- confirm.
        p_stat.wait(timeout=5)
        self.logger.debug(p_stat.stdout.readline())  # dumping first line (ps header)
        line = p_stat.stdout.readline()
        self.logger.debug(line)
        err_lines = p_stat.stderr.read()
        if err_lines:
            self.logger.warning(err_lines)
        rexp = re.findall('(\d\d:\d\d:\d\d) (.+?)((<defunct>)|($))', line)
        if rexp:
            time = rexp[0][0]
            if rexp[0][2]:
                # '<defunct>' marker: zombie process -> completed
                state = 'C'
            else:
                state = 'R'
        else:
            # no ps line at all: process is gone -> completed
            time = '00:00:00'
            state = 'C'
        return state, time
    @property
    def script_generator(self):
        # one-line command scripts are enough for local Popen runs
        return SimpleScriptGenerator(self.executor_work_dir)
    def job_del(self, job_id, fire_and_forget=False):
        """kill -9 the job; with fire_and_forget, don't wait for output."""
        comm_env = communication_environment()
        comm_env.logger = self.logger
        _POpen = comm_env.executor_popen
        if fire_and_forget:
            # NOTE(review): uses the local Popen rather than _POpen -- confirm
            # this is intended for remote-executor configurations.
            Popen(['kill', '-9', job_id])
            return None
        p = _POpen(['kill', '-9', job_id], stderr=PIPE, stdout=PIPE)
        return p.communicate()
class QsubExecution(ExecutionEnvironment):
    """Execution environment submitting jobs through PBS/Torque qsub."""
    def __init__(self):
        self._available_modules = None  # lazy cache for `module avail` output
        self.base_modules = None        # modules every generated job script loads
        super(QsubExecution, self).__init__()
    def set_settings(self, **settings):
        _settings = self.set_attribute_if_in_settings('base_modules', **settings)
        super(QsubExecution, self).set_settings(**_settings)
    @property
    def available_modules(self):
        """Dict mapping module name -> list of available versions.

        Parsed once (then cached) from ``module avail``, which writes its
        listing to stderr.
        """
        if self._available_modules:
            return self._available_modules
        p = Popen("module avail", stdout=PIPE, stderr=PIPE, shell=True)
        (o, e) = p.communicate()
        if e:
            lines = re.findall(r'/apps/dcc/etc/Modules/modulefiles\W+(.+)',
                               e, re.DOTALL)
        else:
            lines = list()
        if lines:
            lines = lines[0]
        else:
            return dict()
        modules = re.split(r'[ \t\n]+', lines)[:-1]
        # BUG FIX: str.strip('(default)') strips any of those *characters*
        # from both ends (mangling names such as 'fftw' -> 'w'); remove the
        # literal '(default)' marker instead.
        module_ver_list = [m.replace('(default)', '').split('/') for m in modules]
        module_dict = defaultdict(list)
        for mod_ver in module_ver_list:
            if len(mod_ver) < 2:
                # unversioned module -> record it under the 'default' version
                # noinspection PyTypeChecker
                mod_ver.append('default')
            module_dict[mod_ver[0]].append(mod_ver[1])
        self._available_modules = module_dict
        return module_dict
    def job_stat(self, job_id):
        """Return (state, time) for *job_id* from qstat; for queued jobs
        the time is the estimated start reported by ``showstart``."""
        p_stat = Popen(['qstat', job_id], stdout=PIPE, stderr=FNULL)
        vals = re.split(r'[ ]+', re.findall(job_id + '.+', p_stat.stdout.read())[0])
        keys = ['Job ID', 'Name', 'User', 'Time Use', 'S', 'Queue']
        info = dict(zip(keys, vals[:-1]))
        if info['S'] == 'Q':
            p_start = Popen(['showstart', job_id], stdout=PIPE)
            time_str = re.findall(r'Estimated Rsv based start in\W+(\d+:\d+:\d+)', p_start.stdout.read()) or ['00:00:00']
            time_str = time_str[0]
            return 'Q', time_str
        return info['S'], info['Time Use']
    def job_start(self, execution_script_location):
        """Submit the script with qsub; return the numeric job id."""
        p_sub = Popen(['qsub', execution_script_location], stdout=PIPE, stderr=PIPE)
        stdout = p_sub.stdout.read()
        job_id = re.findall(r'(\d+)\.\w+', stdout)[0]
        return job_id
    def job_del(self, job_id, fire_and_forget=False):
        """qdel the job; with fire_and_forget, don't collect output."""
        if fire_and_forget:
            Popen(['qdel', job_id], stderr=FNULL, stdout=FNULL)
            return None
        p = Popen(['qdel', job_id], stderr=PIPE, stdout=PIPE)
        return p.communicate()
    @property
    def script_generator(self):
        # BUG FIX: the base class declares this as an abstractproperty and
        # PopenExecution implements it as a property; the decorator was
        # missing here, so ``env.script_generator`` returned a bound method
        # instead of a generator instance.
        comm_env = communication_environment()
        # noinspection PyProtectedMember
        return HPCScriptGenerator(self.base_modules, self.executor_work_dir,
                                  comm_env._manager_proxy)
class ExecTest(ExecAllOnSameMachine, PopenExecution):
pass
class SerializingEnvironment(Environment):
def __init__(self):
self.serialize_wrapper = True
self.codec_handlers = HANDLERS
self.key_typecasts = list()
self._decoder = None
self._encoder = None
self.codec = None
super(SerializingEnvironment, self).__init__()
def build_codec(self):
(enc, dec) = build_codec('RemoteExec', *tuple(self.codec_handlers))
def encoder(obj):
return json.dumps(obj, cls=enc)
self._encoder = encoder
def decoder(obj):
return json.loads(obj, cls=dec, key_typecasts=self.key_typecasts)
self._decoder = decoder
@property
def encoder(self):
if not self._encoder:
self.build_codec()
return self._encoder
@property
def decoder(self):
if not self._decoder:
self.build_codec()
return self._decoder
def set_settings(self, **settings):
_settings = self.set_attribute_if_in_settings('codec_handlers',
'key_typercasts',
**settings)
super(SerializingEnvironment, self).set_settings(**_settings)
| gpl-3.0 | 4,472,739,699,373,511,700 | 37.759907 | 149 | 0.623226 | false | 4.129128 | false | false | false |
NirBenTalLab/proorigami-cde-package | cde-root/usr/local/apps/inkscape/share/inkscape/extensions/pathmodifier.py | 1 | 13934 | #!/usr/bin/env python
'''
Copyright (C) 2006 Jean-Francois Barraud, [email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
[email protected]
'''
import inkex, cubicsuperpath, bezmisc, simplestyle
import copy, math, re, random, xml.xpath
def parseTransform(transf,mat=[[1.0,0.0,0.0],[0.0,1.0,0.0]]):
if transf=="":
return(mat)
result=re.match("(translate|scale|rotate|skewX|skewY|matrix)\(([^)]*)\)",transf)
#-- translate --
if result.group(1)=="translate":
args=result.group(2).split(",")
dx=float(args[0])
if len(args)==1:
dy=0.0
else:
dy=float(args[1])
matrix=[[1,0,dx],[0,1,dy]]
#-- scale --
if result.groups(1)=="scale":
args=result.group(2).split(",")
sx=float(args[0])
if len(args)==1:
sy=sx
else:
sy=float(args[1])
matrix=[[sx,0,0],[0,sy,0]]
#-- rotate --
if result.groups(1)=="rotate":
args=result.group(2).split(",")
a=float(args[0])*math.pi/180
if len(args)==1:
cx,cy=(0.0,0.0)
else:
cx,cy=args[1:]
matrix=[[math.cos(a),-math.sin(a),cx],[math.sin(a),math.cos(a),cy]]
#-- skewX --
if result.groups(1)=="skewX":
a=float(result.group(2))*math.pi/180
matrix=[[1,math.tan(a),0],[0,1,0]]
#-- skewX --
if result.groups(1)=="skewX":
a=float(result.group(2))*math.pi/180
matrix=[[1,0,0],[math.tan(a),1,0]]
#-- matrix --
if result.group(1)=="matrix":
a11,a21,a12,a22,v1,v2=result.group(2).split(",")
matrix=[[float(a11),float(a12),float(v1)],[float(a21),float(a22),float(v2)]]
matrix=composeTransform(mat,matrix)
if result.end()<len(transf):
return(parseTransform(transf[result.end():],matrix))
else:
return matrix
def formatTransform(mat):
return("matrix(%f,%f,%f,%f,%f,%f)"%(mat[0][0],mat[1][0],mat[0][1],mat[1][1],mat[0][2],mat[1][2]))
def composeTransform(M1,M2):
a11=M1[0][0]*M2[0][0]+M1[0][1]*M2[1][0]
a12=M1[0][0]*M2[0][1]+M1[0][1]*M2[1][1]
a21=M1[1][0]*M2[0][0]+M1[1][1]*M2[1][0]
a22=M1[1][0]*M2[0][1]+M1[1][1]*M2[1][1]
v1=M1[0][0]*M2[0][2]+M1[0][1]*M2[1][2]+M1[0][2]
v2=M1[1][0]*M2[0][2]+M1[1][1]*M2[1][2]+M1[1][2]
return [[a11,a12,v1],[a21,a22,v2]]
def applyTransformToNode(mat,node):
m=parseTransform(node.getAttributeNS(None,"transform"))
newtransf=formatTransform(composeTransform(mat,m))
node.setAttributeNS(None,"transform", newtransf)
def applyTransformToPoint(mat,pt):
x=mat[0][0]*pt[0]+mat[0][1]*pt[1]+mat[0][2]
y=mat[1][0]*pt[0]+mat[1][1]*pt[1]+mat[1][2]
pt[0]=x
pt[1]=y
def fuseTransform(node):
m=parseTransform(node.getAttributeNS(None,"transform"))
d = node.getAttributeNS(None,'d')
p=cubicsuperpath.parsePath(d)
for comp in p:
for ctl in comp:
for pt in ctl:
applyTransformToPoint(m,pt)
node.setAttributeNS(None,'d', cubicsuperpath.formatPath(p))
node.removeAttributeNS(None,"transform")
def boxunion(b1,b2):
if b1 is None:
return b2
elif b2 is None:
return b1
else:
return((min(b1[0],b2[0]),max(b1[1],b2[1]),min(b1[2],b2[2]),max(b1[3],b2[3])))
def roughBBox(path):
xmin,xMax,ymin,yMax=path[0][0][0][0],path[0][0][0][0],path[0][0][0][1],path[0][0][0][1]
for pathcomp in path:
for ctl in pathcomp:
for pt in ctl:
xmin=min(xmin,pt[0])
xMax=max(xMax,pt[0])
ymin=min(ymin,pt[1])
yMax=max(yMax,pt[1])
return xmin,xMax,ymin,yMax
class PathModifier(inkex.Effect):
def __init__(self):
inkex.Effect.__init__(self)
##################################
#-- Selectionlists manipulation --
##################################
def computeBBox(self, aList):
bbox=None
for id, node in aList.iteritems():
if node.tagName == 'path':
d = node.attributes.getNamedItem('d')
p = cubicsuperpath.parsePath(d.value)
bbox=boxunion(roughBBox(p),bbox)
return bbox
def duplicateNodes(self, aList):
clones={}
for id,node in aList.iteritems():
clone=node.cloneNode(True)
#!!!--> should it be given an id?
#seems to work without this!?!
clone.setAttributeNS(None,"id", self.uniqueId(node.tagName))
node.parentNode.appendChild(clone)
clones[clone.getAttributeNS(None,"id")]=clone
return(clones)
def uniqueId(self, prefix):
id="%s%04i"%(prefix,random.randint(0,9999))
while len(xml.xpath.Evaluate('//*[@id="%s"]' % id,self.document)):
id="%s%04i"%(prefix,random.randint(0,9999))
return(id)
def expandGroups(self,aList,transferTransform=True):
for id, node in aList.items():
if node.tagName == 'g':
mat=parseTransform(node.getAttributeNS(None,"transform"))
for child in node.childNodes:
if child.nodeType==child.ELEMENT_NODE:
if transferTransform:
applyTransformToNode(mat,child)
aList.update(self.expandGroups({child.getAttribute('id'):child}))
if transferTransform:
node.removeAttribute("transform")
del aList[id]
return(aList)
def expandGroupsUnlinkClones(self,aList,transferTransform=True,doReplace=True):
for id in aList.keys()[:]:
node=aList[id]
if node.tagName == 'g':
self.expandGroups(aList,transferTransform)
self.expandGroupsUnlinkClones(aList,transferTransform,doReplace)
#Hum... not very efficient if there are many clones of groups...
elif node.tagName == 'use':
refid=node.getAttributeNS(inkex.NSS[u'xlink'],'href')
path = '//*[@id="%s"]' % refid[1:]
refnode = xml.xpath.Evaluate(path,self.document)[0]
newnode=refnode.cloneNode(True)
self.recursNewIds(newnode)
if node.hasAttributeNS(None,u'style'):
style=simplestyle.parseStyle(node.getAttributeNS(None,u'style'))
refstyle=simplestyle.parseStyle(refnode.getAttributeNS(None,u'style'))
style.update(refstyle)
newnode.setAttributeNS(None,'style',simplestyle.formatStyle(style))
applyTransformToNode(parseTransform(node.getAttributeNS(None,'transform')),newnode)
if doReplace:
parent=node.parentNode
parent.insertBefore(newnode,node)
parent.removeChild(node)
del aList[id]
newid=newnode.getAttributeNS(None,'id')
aList.update(self.expandGroupsUnlinkClones({newid:newnode},transferTransform,doReplace))
return aList
def recursNewIds(self,node):
if node.nodeType==node.ELEMENT_NODE and node.hasAttributeNS(None,u'id'):
node.setAttributeNS(None,u'id',self.uniqueId(node.tagName))
for child in node.childNodes:
self.recursNewIds(child)
# def makeClonesReal(self,aList,doReplace=True,recursivelytransferTransform=True):
# for id in aList.keys():
# node=aList[id]
# if node.tagName == 'g':
# childs={}
# for child in node.childNodes:
# if child.nodeType==child.ELEMENT_NODE:
# childid=child.getAttributeNS(None,'id')
# del aList[childid]
# aList.update(self.makeClonesReal({childid:child},doReplace))
# elif node.tagName == 'use':
# refid=node.getAttributeNS(inkex.NSS[u'xlink'],'href')
# path = '//*[@id="%s"]' % refid[1:]
# refnode = xml.xpath.Evaluate(path,document)[0]
# clone=refnode.cloneNode(True)
# cloneid=self.uniqueId(clone.tagName)
# clone.setAttributeNS(None,'id', cloneid)
# style=simplestyle.parseStyle(node.getAttributeNS(None,u'style'))
# refstyle=simplestyle.parseStyle(refnode.getAttributeNS(None,u'style'))
# style.update(refstyle)
# clone.setAttributeNS(None,'style',simplestyle.formatStyle(style))
# applyTransformToNode(parseTransform(node.getAttributeNS(None,'transform')),clone)
# if doReplace:
# parent=node.parentNode
# parent.insertBefore(clone,node)
# parent.removeChild(node)
# del aList[id]
# aList.update(self.expandGroupsUnlinkClones({cloneid:clone}))
# return aList
################################
#-- Object conversion ----------
################################
def rectToPath(self,node,doReplace=True):
if node.tagName == 'rect':
x =float(node.getAttributeNS(None,u'x'))
y =float(node.getAttributeNS(None,u'y'))
try:
rx=float(node.getAttributeNS(None,u'rx'))
ry=float(node.getAttributeNS(None,u'ry'))
except:
rx=0
ry=0
w =float(node.getAttributeNS(None,u'width' ))
h =float(node.getAttributeNS(None,u'height'))
d ='M %f,%f '%(x+rx,y)
d+='L %f,%f '%(x+w-rx,y)
d+='A %f,%f,%i,%i,%i,%f,%f '%(rx,ry,0,0,1,x+w,y+ry)
d+='L %f,%f '%(x+w,y+h-ry)
d+='A %f,%f,%i,%i,%i,%f,%f '%(rx,ry,0,0,1,x+w-rx,y+h)
d+='L %f,%f '%(x+rx,y+h)
d+='A %f,%f,%i,%i,%i,%f,%f '%(rx,ry,0,0,1,x,y+h-ry)
d+='L %f,%f '%(x,y+ry)
d+='A %f,%f,%i,%i,%i,%f,%f '%(rx,ry,0,0,1,x+rx,y)
newnode=self.document.createElement('path')
newnode.setAttributeNS(None,'d',d)
newnode.setAttributeNS(None,'id', self.uniqueId('path'))
newnode.setAttributeNS(None,'style',node.getAttributeNS(None,u'style'))
newnode.setAttributeNS(None,'transform',node.getAttributeNS(None,u'transform'))
fuseTransform(newnode)
if doReplace:
parent=node.parentNode
parent.insertBefore(newnode,node)
parent.removeChild(node)
return newnode
def objectToPath(self,node,doReplace=True):
#--TODO: support other object types!!!!
#--TODO: make sure cubicsuperpath supports A and Q commands...
if node.tagName == 'rect':
return(self.rectToPath(node,doReplace))
elif node.tagName == 'path':
attributes = node.attributes.keys()
for uri,attName in attributes:
if uri in [inkex.NSS[u'sodipodi'],inkex.NSS[u'inkscape']]:
# if attName not in ["d","id","style","transform"]:
node.removeAttributeNS(uri,attName)
fuseTransform(node)
return node
else:
inkex.debug("Please first convert objects to paths!...(got '%s')"%node.tagName)
return None
def objectsToPaths(self,aList,doReplace=True):
newSelection={}
for id,node in aList.items():
newnode=self.objectToPath(node,self.document)
del aList[id]
aList[newnode.getAttributeNS(None,u'id')]=newnode
################################
#-- Action ----------
################################
#-- overwrite this method in subclasses...
def effect(self):
#self.duplicateNodes(self.selected)
self.expandGroupsUnlinkClones(self.selected, True)
self.objectsToPaths(self.selected, True)
self.bbox=self.computeBBox(self.selected)
for id, node in self.selected.iteritems():
if node.tagName == 'path':
d = node.attributes.getNamedItem('d')
p = cubicsuperpath.parsePath(d.value)
#do what ever you want with p!
d.value = cubicsuperpath.formatPath(p)
class Diffeo(PathModifier):
def __init__(self):
inkex.Effect.__init__(self)
def applyDiffeo(self,bpt,vects=()):
'''
bpt is a base point and for v in vectors, v'=v-p is a tangent vector at bpt.
Defaults to identity!
'''
for v in vects:
v[0]-=bpt[0]
v[1]-=bpt[1]
#-- your transformations go here:
#x,y=bpt
#bpt[0]=f(x,y)
#bpt[1]=g(x,y)
#for v in vects:
# vx,vy=v
# v[0]=df/dx(x,y)*vx+df/dy(x,y)*vy
# v[1]=dg/dx(x,y)*vx+dg/dy(x,y)*vy
#
#-- !caution! y-axis is pointing downward!
for v in vects:
v[0]+=bpt[0]
v[1]+=bpt[1]
def effect(self):
#self.duplicateNodes(self.selected)
self.expandGroupsUnlinkClones(self.selected, True)
self.expandGroups(self.selected, True)
self.objectsToPaths(self.selected, True)
self.bbox=self.computeBBox(self.selected)
for id, node in self.selected.iteritems():
if node.tagName == 'path':
d = node.attributes.getNamedItem('d')
p = cubicsuperpath.parsePath(d.value)
for sub in p:
for ctlpt in sub:
self.applyDiffeo(ctlpt[1],(ctlpt[0],ctlpt[2]))
d.value = cubicsuperpath.formatPath(p)
#e = Diffeo()
#e.affect()
| mit | 5,068,792,643,310,192,000 | 36.058511 | 104 | 0.566743 | false | 3.235199 | false | false | false |
xsunfeng/cir | cir/workbench_views.py | 1 | 21105 | import json
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.utils import timezone
from cir.models import *
import claim_views
from cir.phase_control import PHASE_CONTROL
import utils
def api_load_all_documents(request):
response = {}
context = {}
context["docs"] = []
forum = Forum.objects.get(id=request.session['forum_id'])
# retrieve docs in a folder
docs = Doc.objects.filter(forum_id=request.session['forum_id'])
for doc in docs:
doc_attr = {}
doc_attr['folder'] = doc.folder
doc_attr['title'] = doc.title
doc_attr['sections'] = []
ordered_sections = doc.sections.filter(order__isnull=False).order_by('order')
for section in ordered_sections:
doc_attr['sections'].append(section.getAttr(forum))
unordered_sections = doc.sections.filter(order__isnull=True).order_by('updated_at')
for section in unordered_sections:
doc_attr['sections'].append(section.getAttr(forum))
context["docs"].append(doc_attr);
response['workbench_document'] = render_to_string("workbench-documents.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_get_toc(request):
response = {}
context = {}
# retrieve docs not in any folder
context['root_docs'] = []
root_docs = Doc.objects.filter(forum_id=request.session['forum_id'], folder__isnull=True).order_by("order")
for doc in root_docs:
m_doc = {}
m_doc['name'] = doc.title
m_doc['id'] = doc.id
m_doc['description'] = doc.description
m_doc['content'] = []
for section in doc.sections.all():
m_sec = {}
m_sec["name"] = section.title
m_sec["id"] = section.id
m_doc['content'].append(m_sec)
m_doc['content'].sort(key = lambda x: x["id"])
context['root_docs'].append(m_doc)
# retrieve docs in a folder
folders = EntryCategory.objects.filter(forum_id=request.session['forum_id'], category_type='doc')
context['folders'] = []
for folder in folders:
m_folder = {}
m_folder['name'] = folder.name
m_folder['content'] = []
docs = Doc.objects.filter(folder=folder)
for doc in docs:
m_doc = {}
m_doc['name'] = doc.title
m_doc['id'] = doc.id
m_doc['description'] = doc.description
m_doc['content'] = []
for section in doc.sections.all():
m_sec = {}
m_sec["name"] = section.title
m_sec["id"] = section.id
m_doc['content'].append(m_sec)
m_doc['content'].sort(key = lambda x: x["id"])
m_folder['content'].append(m_doc)
context['folders'].append(m_folder)
response['document_toc'] = render_to_string("document-toc.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_get_doc_by_hl_id(request):
response = {}
context = {}
forum = Forum.objects.get(id=request.session['forum_id'])
# retrieve docs in a folder
# nugget claim one-one mapping
claim_id = request.REQUEST.get("claim_id")
if HighlightClaim.objects.filter(claim = Claim.objects.get(id=claim_id)).count() > 0:
hl = HighlightClaim.objects.filter(claim = Claim.objects.get(id=claim_id))[0].highlight
sec = DocSection.objects.get(id=hl.context.id)
doc = sec.doc
context['doc_name'] = doc.title
context['sections'] = []
context['doc_id'] = doc.id
ordered_sections = doc.sections.filter(order__isnull=False).order_by('order')
for section in ordered_sections:
context['sections'].append(section.getAttr(forum))
unordered_sections = doc.sections.filter(order__isnull=True).order_by('updated_at')
for section in unordered_sections:
context['sections'].append(section.getAttr(forum))
response['workbench_document'] = render_to_string("workbench-document.html", context)
response['doc_id'] = doc.id
response['highlight'] = hl.getAttr()
response['hl_id'] = hl.id
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_get_doc_by_sec_id(request):
response = {}
context = {}
forum = Forum.objects.get(id=request.session['forum_id'])
# retrieve docs in a folder
sec_id = request.REQUEST.get("sec_id")
sec = DocSection.objects.get(id = sec_id)
doc = sec.doc
context['doc_name'] = doc.title
context['sections'] = []
context['doc_id'] = doc.id
ordered_sections = doc.sections.filter(order__isnull=False).order_by('order')
for section in ordered_sections:
context['sections'].append(section.getAttr(forum))
unordered_sections = doc.sections.filter(order__isnull=True).order_by('updated_at')
for section in unordered_sections:
context['sections'].append(section.getAttr(forum))
response['workbench_document'] = render_to_string("workbench-document.html", context)
response['doc_id'] = doc.id
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_get_doc_by_doc_id(request):
response = {}
context = {}
forum = Forum.objects.get(id=request.session['forum_id'])
# retrieve docs in a folder
doc_id = request.REQUEST.get("doc_id")
doc = Doc.objects.get(id = doc_id)
context['doc_name'] = doc.title
context['doc_id'] = doc.id
context['sections'] = []
ordered_sections = doc.sections.filter(order__isnull=False).order_by('order')
for section in ordered_sections:
context['sections'].append(section.getAttr(forum))
unordered_sections = doc.sections.filter(order__isnull=True).order_by('updated_at')
for section in unordered_sections:
context['sections'].append(section.getAttr(forum))
response['workbench_document'] = render_to_string("workbench-document.html", context)
response['doc_id'] = doc.id
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_get_init_doc(request):
response = {}
context = {}
forum = Forum.objects.get(id=request.session['forum_id'])
# retrieve docs in a folder
doc = Doc.objects.filter(forum_id=request.session['forum_id'], order__isnull=False).order_by('order')[0]
doc_id = doc.id
context['doc_name'] = doc.title
context['doc_id'] = doc_id
context['sections'] = []
ordered_sections = doc.sections.filter(order__isnull=False).order_by('order')
for section in ordered_sections:
context['sections'].append(section.getAttr(forum))
unordered_sections = doc.sections.filter(order__isnull=True).order_by('updated_at')
for section in unordered_sections:
context['sections'].append(section.getAttr(forum))
response['workbench_document'] = render_to_string("workbench-document.html", context)
response['doc_id'] = doc_id
return HttpResponse(json.dumps(response), mimetype='application/json')
def add_nugget_comment(request):
response = {}
context = {}
context['nugget_comments'] = []
author = request.user
forum_id = request.session['forum_id']
theme_id = request.REQUEST.get('theme_id')
content = request.REQUEST.get('content')
now = timezone.now()
nugget_comments = NuggetComment.objects.filter(forum_id = forum_id, theme_id = theme_id).order_by('created_at')
if (content != ""):
newNuggetComment = NuggetComment(author = author, forum_id = forum_id, theme_id = theme_id, content = content, created_at = now)
newNuggetComment.save()
nugget_comments = NuggetComment.objects.filter(forum_id = forum_id, theme_id = theme_id).order_by('created_at')
for nugget_comment in nugget_comments:
context['nugget_comments'].append(nugget_comment)
response['workbench_nugget_comments'] = render_to_string("workbench_nugget_comments.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_all_themes(request):
response = {}
context = {}
forum = Forum.objects.get(id=request.session['forum_id'])
context['forum_name'] = forum.full_name
context['forum_url'] = forum.url
themes = ClaimTheme.objects.filter(forum_id=request.session['forum_id'])
context["themes"] = []
for theme in themes:
context["themes"].append(theme)
context["phase"] = PHASE_CONTROL[forum.phase]
response['workbench_container'] = render_to_string("workbench-theme-container.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_highlights(request):
response = {}
response['highlights'] = []
theme_id = request.REQUEST.get('theme_id')
doc_id = request.REQUEST.get('doc_id')
doc = Doc.objects.get(id = doc_id)
if theme_id == "-1":
for section in doc.sections.all():
highlights = section.highlights.all()
for highlight in highlights:
highlight_info = highlight.getAttr()
response['highlights'].append(highlight_info)
else:
for section in doc.sections.all():
highlights = section.highlights.all()
for highlight in highlights:
if (highlight.theme != None and int(highlight.theme.id) == int(theme_id)):
highlight_info = highlight.getAttr()
response['highlights'].append(highlight_info)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_one_highlight(request):
response = {}
response['highlights'] = []
hl_id = request.REQUEST.get('hl_id')
hl = Highlight.objects.get(id = hl_id)
highlight_info = hl.getAttr()
response['highlight'] = highlight_info
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_remove_claim(request):
response = {}
claim_id = request.REQUEST.get('claim_id')
c = Claim.objects.get(id=claim_id)
c.delete()
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_add_claim(request):
response = {}
content = request.REQUEST.get('content')
theme_id = request.REQUEST.get('theme_id')
data_hl_ids = request.REQUEST.get('data_hl_ids')
category = request.REQUEST.get('category')
now = timezone.now()
if 'actual_user_id' in request.session:
actual_author = User.objects.get(id=request.session['actual_user_id'])
else:
actual_author = None
if actual_author:
newClaim = Claim(forum_id=request.session['forum_id'], author=actual_author, delegator=request.user, content=content,
created_at=now, updated_at=now, theme_id=theme_id, claim_category=category)
else:
newClaim = Claim(forum_id=request.session['forum_id'], author=request.user, created_at=now, updated_at=now, content=content,
theme_id=theme_id, claim_category=category)
newClaim.save()
if actual_author:
claim_version = ClaimVersion(forum_id=request.session['forum_id'], author=actual_author, delegator=request.user,
content=content, created_at=now, updated_at=now, claim=newClaim)
else:
claim_version = ClaimVersion(forum_id=request.session['forum_id'], author=request.user, content=content,
created_at=now, updated_at=now, claim=newClaim)
claim_version.save()
data_hl_ids = data_hl_ids.strip()
data_hl_ids_set = data_hl_ids.split(" ")
for data_hl_id in data_hl_ids_set:
newHighlightClaim = HighlightClaim(claim_id=newClaim.id, highlight_id=data_hl_id)
newHighlightClaim.save()
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_assign_nugget(request):
highlight_id = request.REQUEST.get("highlight_id")
theme_id = request.REQUEST.get("theme_id")
highlight = Highlight.objects.get(id=highlight_id)
highlight.theme_id = theme_id
highlight.save()
response = {}
return HttpResponse(json.dumps(response), mimetype='application/json')
# nugget list zone
def api_change_to_nugget(request):
# input: highlight_ids, output: set as nugget
response = {}
context = {}
data_hl_ids = request.REQUEST.get("data_hl_ids").split(" ")
for data_hl_id in data_hl_ids:
hl = Highlight.objects.get(id = data_hl_id)
hl.is_nugget = True
hl.save()
docs = Doc.objects.filter(forum_id=request.session["forum_id"])
context['highlights'] = []
for doc in docs:
for section in doc.sections.all():
highlights = section.highlights.filter(is_nugget = True)
for highlight in highlights:
context['highlights'].append(highlight.getAttr())
response['workbench_nuggets'] = render_to_string("workbench-nuggets.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_change_to_nugget_1(request):
# input: highlight_id, output: one nugget
response = {}
context = {}
data_hl_id = request.REQUEST.get("data_hl_id")
hl = Highlight.objects.get(id = data_hl_id)
hl.is_nugget = True
hl.save()
context['highlight'] = hl.getAttr()
response['workbench_single_nugget'] = render_to_string("workbench-single-nugget.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_remove_nugget(request):
# input: highlight_ids, output: set as not nugget
response = {}
context = {}
hl_id = request.REQUEST.get("hl_id")
hl = Highlight.objects.get(id = hl_id)
# hl.is_nugget = False
# hl.save()
hl.delete()
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_nugget_list(request):
response = {}
context = {}
theme_id = int(request.REQUEST.get("theme_id"))
docs = Doc.objects.filter(forum_id=request.session["forum_id"])
context['highlights'] = []
for doc in docs:
for section in doc.sections.all():
if (theme_id > 0):
highlights = section.highlights.filter(theme_id = theme_id)
else:
highlights = section.highlights.all()
for highlight in highlights:
context['highlights'].append(highlight.getAttr())
context['highlights'].sort(key = lambda x: x["created_at"], reverse=True)
response['workbench_nugget_list'] = render_to_string("workbench-nuggets.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_nugget_list_partial(request):
response = {}
context = {}
context['highlights'] = []
highlight_ids = request.REQUEST.get("highlight_ids")
highlight_ids = highlight_ids.split()
for highlight_id in highlight_ids:
highlight = Highlight.objects.get(id = highlight_id)
context['highlights'].append(highlight.getAttr())
response['workbench_nugget_list'] = render_to_string("workbench-nuggets.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_load_claim_list_partial(request):
response = {}
context = {}
context['highlights'] = []
highlight_id = request.REQUEST.get("highlight_id")
highlightClaims = HighlightClaim.objects.filter(highlight_id = highlight_id)
context["claims"] = []
for highlightClaim in highlightClaims:
claim = highlightClaim.claim
item = {}
item['date'] = utils.pretty_date(claim.updated_at)
item['content'] = unicode(ClaimVersion.objects.filter(claim_id = claim.id)[0]) + " (" + claim.claim_category + ")"
item['id'] = claim.id
item['author_name'] = claim.author.first_name + " " + claim.author.last_name
item['is_author'] = (request.user == claim.author)
item['highlight_ids'] = ""
for highlight in claim.source_highlights.all():
item['highlight_ids'] += (str(highlight.id) + " ")
item['highlight_ids'].strip(" ")
context["claims"].append(item)
response['workbench_claims'] = render_to_string("workbench-claims.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_edit_claim(request):
claim_id = request.REQUEST.get("claim_id")
content = request.REQUEST.get("content")
claim = Claim.objects.get(id = claim_id)
claim.content = content
claim.save()
response = {}
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_get_claim_by_theme(request):
forum = Forum.objects.get(id=request.session['forum_id'])
response = {}
context = {}
theme_id = int(request.REQUEST.get('theme_id'))
claim_category = request.REQUEST.get('claim_category')
if (theme_id > 0):
claims = Claim.objects.filter(theme_id = theme_id, claim_category = claim_category)
else:
claims = Claim.objects.filter(forum = forum, claim_category = claim_category)
context["claims"] = []
for claim in claims:
item = {}
item['date'] = utils.pretty_date(claim.updated_at)
item['created_at'] = utils.pretty_date(claim.created_at)
item['created_at_used_for_sort'] = claim.created_at
item['content'] = unicode(ClaimVersion.objects.filter(claim_id = claim.id)[0])
item['id'] = claim.id
item['author_name'] = claim.author.first_name + " " + claim.author.last_name
item['is_author'] = (request.user == claim.author)
item['highlight_ids'] = ""
for highlight in claim.source_highlights.all():
item['highlight_ids'] += (str(highlight.id) + " ")
item['highlight_ids'].strip(" ")
context["claims"].append(item)
context['claims'].sort(key = lambda x: x["created_at_used_for_sort"], reverse=True)
response['workbench_claims'] = render_to_string("workbench-claims.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_others(request):
response = {}
action = request.REQUEST.get('action')
if action == 'create':
if not request.user.is_authenticated():
return HttpResponse("Please log in first.", status=403)
content = request.REQUEST.get('content')
content_type = request.REQUEST.get('type')
start = request.REQUEST.get('start')
end = request.REQUEST.get('end')
context_id = request.REQUEST.get('contextId')
# create highlight object
context = Entry.objects.get(id=context_id)
highlight = Highlight(start_pos=start, end_pos=end, context=context, author=request.user)
highlight.save()
response['highlight_id'] = highlight.id
# then create the content
now = timezone.now()
if 'actual_user_id' in request.session:
actual_author = User.objects.get(id=request.session['actual_user_id'])
else:
actual_author = None
if content_type == 'comment':
if actual_author:
Post.objects.create(forum_id=request.session['forum_id'], author=actual_author, delegator=request.user,
content=content, created_at=now, updated_at=now, highlight=highlight, content_type='comment')
else:
Post.objects.create(forum_id=request.session['forum_id'], author=request.user, content=content,
created_at=now, updated_at=now, highlight=highlight, content_type='comment')
elif content_type == 'question':
if actual_author:
Post.objects.create(forum_id=request.session['forum_id'], author=actual_author, delegator=request.user,
content=content, created_at=now, updated_at=now, highlight=highlight, content_type='question')
else:
Post.objects.create(forum_id=request.session['forum_id'], author=request.user, content=content,
created_at=now, updated_at=now, highlight=highlight, content_type='question')
elif content_type == 'claim':
claim_views._add_claim(request, highlight)
return HttpResponse(json.dumps(response), mimetype='application/json')
if action == 'load-doc':
doc_id = request.REQUEST.get('doc_id')
doc = Doc.objects.get(id=doc_id)
response['highlights'] = []
mytags = set()
alltags = set()
for section in doc.sections.all():
highlights = section.highlights.all()
for highlight in highlights:
highlight_info = highlight.getAttr()
response['highlights'].append(highlight_info)
if highlight_info['type'] == 'tag':
if highlight_info['author_id'] == request.user.id:
mytags.add(highlight_info['content'])
alltags.add(highlight_info['content'])
response['html'] = render_to_string('doc-tag-area.html', {'mytags': mytags, 'alltags': alltags})
return HttpResponse(json.dumps(response), mimetype='application/json')
| mit | -1,774,856,148,919,498,000 | 44.192719 | 136 | 0.640843 | false | 3.654545 | false | false | false |
italomaia/turtle-linux | games/DigbyMarshmallow/lib/menu.py | 1 | 21015 | import pyglet
from pyglet import app
from pyglet.event import EVENT_HANDLED
from pyglet.event import EVENT_UNHANDLED
from pyglet.window import key
from pyglet.gl import *
import mode
import caterpie
import config
from common import *
from constants import *
from content import levels, swags
import data
import os
import squirtle
import swag
class MenuMode(mode.Mode):
    """Main menu screen of the game.
    Drives the title/main menu, the 7x4 level-selection grid, the swag
    collection viewer, the options screen and the death/continue/victory
    screens, plus the fade transitions into and out of gameplay.
    Follows the ``mode.Mode`` protocol: ``connect``/``disconnect`` attach
    and detach it from the game window, ``tick`` advances animation, and
    ``on_draw``/``on_key_press`` are pyglet window event handlers.
    """
    name = "menu"
    # Static UI artwork, loaded once when the class body is executed.
    downarrow_image = data.load_image("menu/down.svgz")
    uparrow_image = data.load_image("menu/up.svgz")
    star_image = data.load_image("menu/perfect.svgz")
    diary_image = data.load_image("menu/diary.svgz")
    questionmark_image = data.load_image("menu/questionmark.svgz")
    back_image = data.load_image("menu/back.svgz")
    bg_image = data.load_image("menu/background.svgz", anchor_x='center', anchor_y='center')
    def __init__(self, default="main"):
        """Build all widgets and the per-state option tables.
        :param default: state to show when the mode connects
                        ("main", "death", "continue", "victory", ...).
        """
        self.control = None
        self.window = None
        self.title = caterpie.TextBox(
            xpos = 0.0, ypos = 0.7,
            width = 1.0, height = 0.3,
            halign = "center", valign = "center",
            padding = MENU_PADDING,
            margin = MENU_MARGIN,
            font_size = MENU_FONT_SIZE,
            text = "The Space Adventures of Digby Marshmallow, " \
                   "Space Burglar Extraordinaire ...In Space!",
        )
        self.level_display = caterpie.TextBox(
            xpos = 0.0, ypos = 0.7,
            width = 1.0, height = 0.3,
            halign = "center", valign = "center",
            padding = MENU_PADDING,
            margin = MENU_MARGIN,
            font_size = MENU_FONT_SIZE,
            text = "Select a level...",
            expand = "both",
        )
        self.collection_display = caterpie.TextBox(
            xpos = 0.0, ypos = 0.7,
            width = 1.0, height = 0.3,
            halign = "center", valign = "center",
            padding = MENU_PADDING,
            margin = MENU_MARGIN,
            font_size = MENU_FONT_SIZE,
            text = "Swag Collection",
            expand = "both",
        )
        self.menu = caterpie.TextMenu(
            xpos = 0.0, ypos = 0.1,
            width = 1.0, height = 0.6,
            halign = "center", valign="top",
            padding = MENU_PADDING,
            margin = MENU_MARGIN,
            spacing = MENU_SPACING,
            font_size = MENU_FONT_SIZE,
            scrolling=True
        )
        highlight_color = (0.4, 0.3, 0.5, 0.6)
        # Shared hover handler for the scroll arrows; bound to each button
        # below via __get__ so `self` inside it is the button.
        def arrow_set_mouse(self, x, y):
            if (x, y) in self:
                self.background = highlight_color
            else:
                style = caterpie.get_default_style()
                self.background = style.background
        self.downarrow = caterpie.ImageButton(
            halign="center", valign="top",
            graphic = self.downarrow_image,
            callback = (self.menu.scroll_relative, 5),
        )
        self.downarrow.set_mouse = arrow_set_mouse.__get__(self.downarrow)
        self.uparrow = caterpie.ImageButton(
            height = 0.05, halign="center", valign="bottom",
            graphic = self.uparrow_image,
            callback = (self.menu.scroll_relative, -5),
        )
        self.uparrow.set_mouse = arrow_set_mouse.__get__(self.uparrow)
        self.interface = [self.title, self.menu, self.level_display, self.collection_display]
        # Menu option lists keyed by state name; DEBUG builds also expose
        # the level editor entry.  Entries are (label, callback) pairs or,
        # for plain text, bare strings.
        self.states = {
            "main" : [
                ("Start Game", (self.set_state, "levels")),
                ("Editor", self.do_editor),
                ("Collection", self.do_collection),
                ("Options", self.do_options),
                ("Quit", self.do_quit),
            ] if DEBUG else [
                ("Start Game", (self.set_state, "levels")),
                ("Collection", self.do_collection),
                ("Options", self.do_options),
                ("Quit", self.do_quit),
            ],
            "death" : [
                "You died!",
                ("Retry", self.do_retry_level),
                ("New level", (self.set_state, "levels")),
                ("Menu", (self.set_state, "main")),
            ],
            "continue" : [
                ("Continue", self.do_next_level),
                ("Retry", self.do_retry_level),
                ("New level", (self.set_state, "levels")),
                ("Menu", (self.set_state, "main")),
            ],
            "options" : [
                ("option:fullscreen", self.toggle_fullscreen),
                ("Back", (self.set_state, "main"))
            ],
            "collection" : [],
            "victory" : [],
        }
        self.scroll_states = ["levels"]
        self.title_states = ["main", "options"]
        self.menu_states = ["main", "options", "death", "continue"]
        self.state = None
        self.default = default
        self.fade_level = 1.0
        self.collection_back = None
        self.collection_up = None
        self.collection_down = None
    def fade_in(self, callback):
        """Start fading the black overlay out; run callback when done."""
        self.fading = True
        self.target_fade = 0
        self.fade_callback = callback
        self.window.remove_handlers(self.menu)
    def fade_out(self, callback):
        """Start fading to black; run callback when fully opaque."""
        self.fading = True
        self.target_fade = 1
        self.fade_callback = callback
        self.window.remove_handlers(self.menu)
    def stop_fade(self):
        """Freeze the fade and restore menu input handlers if appropriate."""
        self.fading = False
        self.target_fade = self.fade_level
        self.fade_callback = None
        if self.window is not None:
            if self.state in self.menu_states:
                self.window.push_handlers(self.menu)
    def tick(self):
        """Advance the fade animation and keyboard scrolling each frame."""
        if self.target_fade > self.fade_level:
            self.fade_level = min(self.target_fade, self.fade_level + FADE_RATE)
        elif self.target_fade < self.fade_level:
            self.fade_level = max(self.target_fade, self.fade_level - FADE_RATE)
        elif self.fading:
            # Fade finished: fire the stored callback (optionally a
            # (func, *args) tuple) exactly once, then stop.
            if isinstance(self.fade_callback, tuple):
                func = self.fade_callback[0]
                args = self.fade_callback[1:]
                func(*args)
            else:
                self.fade_callback()
            self.stop_fade()
        if self.state in ('collection', 'victory'):
            if self.control.keys[key.UP]:
                self.collection.view_y += 10
            elif self.control.keys[key.DOWN]:
                self.collection.view_y -= 10
    def connect(self, control):
        """Attach to the game: build widgets, level list and start music."""
        self.control = control
        self.window = control.window
        for component in self.interface:
            component.window = self.window
        self.position_buttons()
        self.prepare_levelicons()
        self.control.music.start_song("ABreezeFromAlabama.mp3")
        # Rebuild the textual level list: one entry per unlocked level,
        # annotated with the best swag percentage where known.
        self.states['levels'] = []
        for n in xrange(min(control.gamestate.current_max_level + 1, len(levels.levels))):
            text = levels.levels[n][1]
            if n in control.gamestate.best_swags:
                text += ' (%d%%)' % (control.gamestate.best_swags[n][0],)
            option_spec = (text, (self.do_start_game, n))
            self.states['levels'].append(option_spec)
        self.states['levels'].append(("Back", (self.set_state, "main")))
        self.update_collection(self.control.gamestate.level_collected_swag if self.default == 'victory' else None)
        self.set_state(self.default)
        self.fade_in(lambda: None)
    def update_collection(self, swag_list=None):
        """Rebuild the swag-collection text layout and its buttons.
        With ``swag_list=None`` the lifetime best totals across all levels
        are shown under a "Swag Collection" title; otherwise ``swag_list``
        is the haul of the just-finished level and the title becomes
        "Victory!" with a percentage (and "Flawless" on 100%).
        """
        gs = self.control.gamestate
        self.collection_elements = []
        doc = pyglet.text.decode_attributed('')
        total_value = 0
        if swag_list is None:
            title_text = 'Swag Collection'
        else:
            title_text = 'Victory!'
        for cls, radius, name, img, value in swags.swags:
            if swag_list is None:
                total = 0
                for n in gs.best_swags:
                    swag_dict = gs.best_swags[n][1]
                    if name in swag_dict:
                        total += swag_dict[name]
            else:
                total = swag_list.get(name, 0)
            if total:
                elt = squirtle.SVGElement(data.load_image(os.path.join('swag', img), anchor_x='center', anchor_y='center'), 0.02 * self.window.height, radius=radius * self.window.height/SCREEN_HEIGHT, width=self.window.width * .35)
                self.collection_elements.append(elt)
                doc.insert_element(len(doc.text), elt)
                doc.insert_text(len(doc.text), '%d x %s ($%d) = $%d\n\n' % (total, name, value, total*value))
                total_value += total * value
        if swag_list is not None:
            # Percentage of the level's total available swag value.
            swag_val = 0
            for a in gs.current_stage['actors']:
                if isinstance(a, swag.Swag):
                    swag_val += a.value
            title_text += ' (%d%%)' % (100 * total_value/swag_val,)
            if total_value == swag_val:
                title_text = 'Flawless ' + title_text
            title_text += '\nTotal Value: $%d' % (total_value,)
        self.collection_display.text = title_text
        if doc.text:
            doc.insert_text(len(doc.text), '\n\n\n')
        doc.set_style(0, len(doc.text), {'font_name': "Fontdinerdotcom", 'font_size': 0.04 * self.window.height, 'color': (255, 255, 255, 255)})
        self.collection = pyglet.text.layout.IncrementalTextLayout(
            doc,
            self.window.width * .9, self.window.height * .7,
            multiline=True)
        self.collection.content_valign = 'top'
        self.collection.x = self.window.width * .05
        self.collection.y = self.window.height * .65
        self.collection.anchor_x = 'left'
        self.collection.anchor_y = 'top'
        sw, sh = self.window.get_size()
        size = sh * 0.1
        self.collection_back = caterpie.ImageButton(
            xpos = sw * .05, ypos = sh * 0.1,
            width = size, height = size,
            callback = (self.set_state, 'continue' if swag_list is not None else 'main'),
            graphic = self.back_image,
            outline = None,
            background = None,
        )
        def up():
            self.collection.view_y += 50
        def down():
            self.collection.view_y -= 50
        self.collection_up = caterpie.ImageButton(
            xpos = sw * .95 - size, ypos = sh * 0.55,
            width = size, height = size,
            callback = up,
            graphic = self.uparrow_image,
            outline=None,
            background=None,
        )
        self.collection_down = caterpie.ImageButton(
            xpos = sw * .95 - size, ypos = sh * 0.05,
            width = size, height = size,
            callback = down,
            graphic = self.downarrow_image,
            outline=None,
            background=None,
        )
    def disconnect(self):
        """Detach from the window, removing every pushed handler."""
        for component in self.interface:
            component.window = None
        self.window.remove_handlers(self.menu)
        self.window.remove_handlers(self.uparrow)
        self.window.remove_handlers(self.downarrow)
        self.window.remove_handlers(self.collection_back)
        self.window.remove_handlers(self.collection_down)
        self.window.remove_handlers(self.collection_up)
        for icon in self.levelicons:
            self.window.remove_handlers(icon)
        self.window = None
        self.control = None
    def prepare_levelicons(self):
        """Build the 7x4 grid of level-select buttons.
        Index 27 (the last cell) is the "back" button; locked levels show
        a question mark, scene/diary levels a diary icon, and playable
        levels a cropped screenshot thumbnail.
        """
        sw, sh = self.window.get_size()
        li_size = sh * LEVELICON_SIZE
        spacing = sh * LEVELICON_SPACING
        y = spacing * 4 + li_size * 3
        self.levelicons = []
        self.currenticon = 0
        for j in xrange(4):
            x = (sw - li_size * 7 - spacing * 6) / 2
            for i in xrange(7):
                idx = 7*j+i
                # Hover handler; bound to each button via __get__ below.
                def set_mouse(btn, x, y):
                    if btn.idx == 27:
                        self.level_display.text = "Select a level...\n"
                    if (x, y) in btn:
                        self.hover_level(btn.idx)
                if idx == 27:
                    graphic = self.back_image
                elif idx > self.control.gamestate.current_max_level:
                    graphic = self.questionmark_image
                elif levels.levels[idx][0].endswith(".scene"):
                    graphic = self.diary_image
                else:
                    # Crop the screenshot to a centred 0.8h x 0.8h square.
                    graphic = pyglet.image.load(data.file_path(os.path.join("screenshots", levels.levels[idx][3])))
                    w, h = graphic.width, graphic.height
                    graphic = graphic.get_region(int(.5 * w - .4 * h), 0, int(h*.8), int(h*.8))
                if isinstance(graphic, pyglet.image.AbstractImage):
                    btn = caterpie.BitmapButton(xpos = x, ypos = y,
                                                width = li_size, height = li_size,
                                                padding = li_size / 10,
                                                callback = (self.click_level, idx),
                                                graphic = graphic,
                                                outline=(0,0,0,1)
                    )
                else:
                    btn = caterpie.ImageButton(
                            xpos = x, ypos = y,
                            width = li_size, height = li_size,
                            padding = li_size / 10,
                            callback = (self.click_level, idx),
                            graphic = graphic,
                            outline=(0,0,0,1)
                    )
                btn.idx = idx
                btn.set_mouse = set_mouse.__get__(btn)
                self.levelicons.append(btn)
                x += spacing + li_size
            y -= spacing + li_size
        self.hover_level(0)
    def set_state(self, name):
        """Switch to menu state ``name``, swapping handler stacks to match.
        Handlers for the outgoing state are popped first; leaving
        "victory" also commits the finished level to the game state.
        """
        if self.state == "levels":
            for icon in self.levelicons:
                self.window.remove_handlers(icon)
        if self.state in ("collection", "victory"):
            self.window.remove_handlers(self.collection_back)
            self.window.remove_handlers(self.collection_down)
            self.window.remove_handlers(self.collection_up)
            if self.state == 'victory':
                self.control.gamestate.finish_level(self.control)
                self.update_collection()
        if self.state in self.menu_states:
            self.window.remove_handlers(self.menu)
        self.state = name
        self.menu.clear_options()
        self.menu.add_options(*self.states[name])
        if name in self.scroll_states:
            self.menu.scrolling = True
            self.position_buttons()
        else:
            self.menu.scrolling = False
        if name in ('victory', 'collection'):
            self.window.push_handlers(self.collection_back)
            self.window.push_handlers(self.collection_down)
            self.window.push_handlers(self.collection_up)
        if name == "levels":
            for icon in self.levelicons:
                self.window.push_handlers(icon)
        if self.state in self.menu_states:
            self.window.push_handlers(self.menu)
    def position_buttons(self):
        """Place the scroll arrows directly above/below the menu box."""
        bx, by, bw, bh = self.menu.box_shape
        sw, sh = self.window.get_size()
        bh = self.menu.height * sh - 2 * self.menu.margin * sh
        self.uparrow.xpos = bx
        self.uparrow.ypos = by + bh
        self.uparrow.width = bw
        self.uparrow.height = bh / 10
        self.downarrow.xpos = bx
        self.downarrow.ypos = by - bh / 10
        self.downarrow.width = bw
        self.downarrow.height = bh / 10
    def click_level(self, idx):
        """Handle a click on grid cell ``idx``: back, start, or ignore."""
        if idx == 27:
            self.set_state("main")
            return
        elif idx <= self.control.gamestate.current_max_level:
            self.fade_out((self.control.gamestate.start_level, idx, self.control))
            self.control.music.stop_song(1.0)
        else:
            # Locked level: do nothing.
            pass
    def hover_level(self, idx):
        """Highlight grid cell ``idx`` and show its name (or placeholder)."""
        self.levelicons[self.currenticon].outline=(0,0,0,1)
        self.currenticon = idx
        self.levelicons[self.currenticon].outline=(1,1,1,1)
        if idx == 27:
            self.level_display.text = "Back..."
        elif idx <= self.control.gamestate.current_max_level:
            file, name, music, img = levels.levels[idx]
            self.level_display.text = name
            if idx in self.control.gamestate.best_swags:
                self.level_display.text += ' (%d%%)' % (self.control.gamestate.best_swags[idx][0],)
        else:
            self.level_display.text = "????"
    def do_next_level(self):
        """Fade out into the next level ("Continue")."""
        self.fade_out((self.control.gamestate.start_level, self.control.gamestate.current_level, self.control))
        self.control.music.stop_song(1.0)
    def do_retry_level(self):
        """Fade out into a retry of the current level."""
        self.fade_out((self.control.gamestate.start_level, self.control.gamestate.current_retry_level, self.control))
        self.control.music.stop_song(1.0)
    def do_start_game(self, n):
        """Fade out into level ``n`` (selected from the textual list)."""
        self.fade_out((self.control.gamestate.start_level, n, self.control))
        self.control.music.stop_song(1.0)
    def do_editor(self):
        """Fade out into the level editor (DEBUG builds only)."""
        self.fade_out((self.control.switch_handler, "editor"))
        self.control.music.stop_song(1.0)
    def do_options(self):
        """Enter the options state and label the toggles with their values."""
        self.set_state("options")
        self.option_labels = {}
        for opt in self.menu.options:
            if opt.label.text.startswith("option:"):
                self.option_labels[opt.label.text[7:]] = opt.label
        suffix = ["Off", "On"][config.fullscreen]
        self.option_labels["fullscreen"].text = "Fullscreen: %s" % suffix
    def do_collection(self):
        """Show the lifetime swag collection."""
        self.set_state("collection")
    def do_quit(self):
        """Fade to black, then exit the pyglet app."""
        self.fade_out(app.exit)
    def toggle_fullscreen(self):
        """Flip and persist the fullscreen option; refresh its label."""
        config.fullscreen = not config.fullscreen
        config.save_option("fullscreen")
        suffix = ["Off", "On"][config.fullscreen]
        self.option_labels["fullscreen"].text = "Fullscreen: %s" % suffix
    def on_key_press(self, sym, mods):
        """Keyboard navigation: grid movement, selection, escape-to-main."""
        if self.state in ('collection', 'victory'):
            if sym in (key.UP, key.DOWN):
                # Let tick() handle held-key scrolling of the collection.
                return EVENT_UNHANDLED
            if self.state == 'collection' and sym == key.LEFT:
                self.set_state('main')
            if self.state == 'victory':
                self.set_state('continue')
        nexticon = None
        if self.state == 'levels':
            if sym == key.UP:
                nexticon = self.currenticon - LEVELS_PER_ROW
            if sym == key.DOWN:
                nexticon = self.currenticon + LEVELS_PER_ROW
            if sym == key.LEFT:
                nexticon = self.currenticon - 1
            if sym == key.RIGHT:
                nexticon = self.currenticon + 1
            if nexticon is not None:
                # Wrap around the whole grid.
                self.hover_level(nexticon % (LEVELS_PER_ROW * LEVEL_ROWS))
            if sym == key.ENTER:
                self.click_level(self.currenticon)
        if sym == key.ESCAPE:
            if self.state != 'main':
                self.set_state('main')
            return EVENT_HANDLED
        return EVENT_UNHANDLED
    def on_draw(self):
        """Render the current state, then the fade overlay on top."""
        self.window.clear()
        self.bg_image.draw(self.window.width / 2, self.window.height / 2, height=self.window.height)
        if self.state in self.menu_states:
            self.menu.draw()
            if self.menu.scrolling:
                self.uparrow.draw()
                self.downarrow.draw()
        elif self.state == "levels":
            for icon in self.levelicons:
                icon.draw()
                # Gold star on levels completed with 100% swag.
                if self.control.gamestate.best_swags.get(icon.idx, [0.0, {}])[0] == 100.0:
                    w = icon.width / 3
                    self.star_image.draw(icon.xpos, icon.ypos, width=w)
            self.level_display.draw()
        elif self.state in ("collection", "victory"):
            # Darkened backdrop, then the scrolling collection clipped by
            # a scissor rectangle so SVG elements don't spill out.
            sw, sh = self.window.get_size()
            glColor4f(0,0,0, 0.8)
            glBegin(GL_QUADS)
            glVertex2f(sw * .1, 0)
            glVertex2f(sw * .9, 0)
            glVertex2f(sw * .9, sh * .7)
            glVertex2f(sw * .1, sh * .7)
            glEnd()
            self.collection.draw()
            glEnable(GL_SCISSOR_TEST)
            glScissor(int(self.collection.x), int(self.collection.y - self.collection.height), int(self.collection.width), int(self.collection_display.box_shape[1] - self.collection.y + self.collection.height))
            for elt in self.collection_elements:
                elt.draw()
            glDisable(GL_SCISSOR_TEST)
            self.collection_display.draw()
            self.collection_back.draw()
            if self.collection.view_y > self.collection.height - self.collection.content_height + EPSILON:
                self.collection_down.draw()
            if self.collection.view_y:
                self.collection_up.draw()
        if self.state in self.title_states:
            self.title.draw()
        if self.fade_level:
            # Full-screen black quad with alpha == fade_level.
            sw, sh = self.window.get_size()
            glColor4f(0, 0, 0, self.fade_level)
            glBegin(GL_QUADS)
            glVertex2f(0, 0)
            glVertex2f(sw, 0)
            glVertex2f(sw, sh)
            glVertex2f(0, sh)
            glEnd()
| gpl-3.0 | -1,687,625,542,057,058,800 | 37.844732 | 231 | 0.531668 | false | 3.675236 | true | false | false |
ltilve/ChromiumGStreamerBackend | build/android/incremental_install/create_install_script.py | 3 | 3754 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a script to run an "_incremental" .apk."""
import argparse
import os
import pprint
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
from pylib import constants
from util import build_utils
SCRIPT_TEMPLATE = """\
#!/usr/bin/env python
#
# This file was generated by:
# //build/android/incremental_install/create_install_script.py
import os
import subprocess
import sys
def main():
script_directory = os.path.dirname(__file__)
def resolve_path(path):
return os.path.abspath(os.path.join(script_directory, path))
cmd_path = resolve_path({cmd_path})
cmd_args = [cmd_path] + {cmd_args}
cmd_path_args = {cmd_path_args}
for arg, path in cmd_path_args:
if arg:
cmd_args.append(arg)
cmd_args.append(resolve_path(path))
return subprocess.call(cmd_args + sys.argv[1:])
if __name__ == '__main__':
sys.exit(main())
"""
def _ParseArgs(args):
  """Expands any @file arguments, then parses the command line.

  Returns:
    An argparse.Namespace; entries from --dex-file-list are merged into
    options.dex_files.
  """
  expanded = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--script-output-path', required=True,
                      help='Output path for executable script.')
  parser.add_argument('--output-directory', default='.',
                      help='Path to the root build directory.')
  parser.add_argument('--apk-path', required=True,
                      help='Path to the .apk to install.')
  parser.add_argument('--split', action='append', dest='splits', default=[],
                      help='A glob matching the apk splits. '
                           'Can be specified multiple times.')
  parser.add_argument('--lib-dir',
                      help='Path to native libraries directory.')
  parser.add_argument('--dex-file', action='append', dest='dex_files',
                      default=[],
                      help='List of dex files to include.')
  parser.add_argument('--dex-file-list',
                      help='GYP-list of dex files.')
  options = parser.parse_args(expanded)
  # Fold the GYP-list form into the repeated --dex-file form.
  options.dex_files += build_utils.ParseGypList(options.dex_file_list)
  return options
def main(args):
  """Writes the wrapper script that runs installer.py for this APK.

  All paths baked into the generated script are made relative to the
  script's own location so the output directory stays relocatable.

  Args:
    args: Command-line arguments (excluding argv[0]).
  """
  options = _ParseArgs(args)

  def relativize(path):
    # Relative to the generated script, not to the current directory.
    return os.path.relpath(path, os.path.dirname(options.script_output_path))

  installer_path = os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android',
                                'incremental_install', 'installer.py')
  installer_path = relativize(installer_path)

  # (flag, path) pairs; a None flag means a positional argument.
  path_args = [
      ('--output-directory', relativize(options.output_directory)),
      (None, relativize(options.apk_path)),
  ]
  if options.lib_dir:
    path_args.append(('--lib-dir', relativize(options.lib_dir)))
  if options.dex_files:
    for dex_file in options.dex_files:
      path_args.append(('--dex-file', relativize(dex_file)))
  for split_arg in options.splits:
    path_args.append(('--split', relativize(split_arg)))

  with open(options.script_output_path, 'w') as script:
    script.write(SCRIPT_TEMPLATE.format(
        cmd_path=pprint.pformat(installer_path),
        cmd_args='[]',
        cmd_path_args=pprint.pformat(path_args)))

  # rwxr-x---.  Spelled 0o750 (valid on Python 2.6+ and Python 3) instead of
  # the Python-2-only literal 0750, which is a SyntaxError under Python 3.
  os.chmod(options.script_output_path, 0o750)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| bsd-3-clause | -3,433,314,086,942,398,000 | 28.793651 | 78 | 0.61561 | false | 3.765296 | false | false | false |
jimr/noterator | tests/test_twilio_plugin.py | 1 | 1042 | # -*- coding: utf-8 -*-
import mock
import unittest
from noterator import Noterator, TWILIO
from noterator.plugins.twilio import BASE_URL
class TestEmailPlugin(unittest.TestCase):
    # NOTE(review): despite the class name, this suite exercises the
    # *twilio* plugin (the file is test_twilio_plugin.py); consider
    # renaming it to TestTwilioPlugin.
    @mock.patch('noterator.plugins.twilio.requests')
    def test_twilio_settings(self, requests):
        """Exhausting a TWILIO-configured Noterator posts exactly one SMS
        to the Twilio Messages endpoint with the configured credentials."""
        cfg = {
            'account_sid': '123456',
            'token': 'twilio-token',
            'from_number': '+987654',
            'to_number': '+13579',
        }
        n = Noterator(range(5), TWILIO, config_file=None)
        n.configure_plugin('twilio', **cfg)
        # Drain the iterator; completion triggers the notification.
        for _ in n:
            pass
        url = '{}/Accounts/{}/Messages.json'.format(
            BASE_URL, cfg['account_sid'],
        )
        payload = {
            "From": cfg['from_number'],
            "To": cfg['to_number'],
            "Body": "{}: {}".format(
                n.head, n._get_body(TWILIO, finished=True),
            ),
        }
        auth = (cfg['account_sid'], cfg['token'])
        requests.post.assert_called_once_with(url, payload, auth=auth)
| mit | -3,016,599,767,701,781,000 | 27.162162 | 70 | 0.52975 | false | 3.580756 | false | false | false |
huntxu/neutron | neutron/db/migration/alembic_migrations/versions/pike/expand/c8c222d42aa9_logging_api.py | 4 | 2153 | # Copyright 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from alembic import op
import sqlalchemy as sa
from neutron_lib.db import constants as db_const
"""logging api
Revision ID: c8c222d42aa9
Revises: 62c781cb6192
Create Date: 2017-05-30 11:51:08.173604
"""
# revision identifiers, used by Alembic.
revision = 'c8c222d42aa9'
down_revision = '62c781cb6192'
def upgrade():
    """Create the ``logs`` table for the logging API resource."""
    # One row per configured log; linked 1:1 to standardattributes
    # (unique FK with ON DELETE CASCADE keeps the two in sync).
    op.create_table(
        'logs',
        sa.Column('project_id',
                  sa.String(length=db_const.PROJECT_ID_FIELD_SIZE),
                  nullable=True,
                  index=True),
        sa.Column('id', sa.String(length=db_const.UUID_FIELD_SIZE),
                  nullable=False),
        sa.Column('standard_attr_id', sa.BigInteger(), nullable=False),
        sa.Column('name', sa.String(length=db_const.NAME_FIELD_SIZE),
                  nullable=True),
        sa.Column('resource_type', sa.String(length=36), nullable=False),
        sa.Column('resource_id', sa.String(length=db_const.UUID_FIELD_SIZE),
                  nullable=True,
                  index=True),
        sa.Column('target_id', sa.String(length=db_const.UUID_FIELD_SIZE),
                  nullable=True,
                  index=True),
        sa.Column('event', sa.String(length=255), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['standard_attr_id'],
                                ['standardattributes.id'],
                                ondelete='CASCADE'),
        sa.UniqueConstraint('standard_attr_id'))
hkff/AccLab | pyAAL/AALtoAccmon.py | 1 | 13330 | """
AALtoDJfodtlmon
Copyright (C) 2016 Walid Benghabrit
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from datetime import datetime
from subprocess import Popen, PIPE
import os.path
import shutil
from AALMetaModel import *
__author__ = 'walid'
class MappingSpec:
    """
    Aggregates the four kinds of mapping entries read from a spec file:
    Service -> services, Agent -> agents, Type -> types, Clause -> clauses.
    """
    def __init__(self):
        # Each instance gets its own, independent empty lists.
        self.services, self.agents, self.types, self.clauses = [], [], [], []
class BaseMap:
    """A single name -> target mapping entry.
    Leading/trailing whitespace is stripped from both values; the string
    form is "<ClassName>: <name> => <target>".
    """
    def __init__(self, name, target):
        self.name = name.strip()
        self.target = target.strip()
    def __str__(self):
        return "{}: {} => {}".format(type(self).__name__, self.name, self.target)
class ServiceMap(BaseMap):
    """Mapping entry binding an AAL service to its concrete target."""
class AgentMap(BaseMap):
    """Mapping entry binding an AAL agent to its concrete target."""
class TypeMap(BaseMap):
    """Mapping entry binding an AAL type to its concrete target."""
class ClauseMap(BaseMap):
    """Mapping entry for an AAL clause; also records the monitor control
    type (e.g. REAL_TIME vs a-posteriori checking)."""
    def __init__(self, name, target, control_type):
        super().__init__(name, target)
        self.control_type = control_type.strip()
    def __str__(self):
        # Reuse the BaseMap rendering and append the control type.
        return "%s => %s" % (super().__str__(), self.control_type)
# AALtoDJFODTLMON
def AALtoDJFODTLMON(mm, spec: MappingSpec, output_file=None):
    """
    Translate AAL program to Djfodtlmon using a spec file.
    :param mm: AAL meta-model of the program; used for its ``file`` name
               and to resolve clauses by name via ``mm.clause``
    :param spec: MappingSpec whose ``clauses`` entries drive rule generation
    :param output_file: currently unused -- the generated module source is
                        returned as a string instead of written to disk
    :return: source code of a Python module that registers monitoring
             rules and log attributes on accmon's ``Sysmon``
    """
    # Buckets of generated code fragments, assembled into ``res`` below.
    log_attributes = []
    custom_predicates = []
    http_rules = []
    view_rules = []
    response_rules = []
    header = "###\n# Mapping file for %s\n# created on %s\n###\n" % (mm.file, datetime.today())
    ###################################
    # Attributes
    ###################################
    # Boilerplate USER_TYPE log attribute the user is expected to fill in.
    log_attributes.append("""
## 1. User type log attribute
def log_user_type(request, view, args, kwargs, response):
    # The evaluation function
    # Do your custom logic to determine the user type
    # The current django user can be accessed via : request.user
    #
    user_type = "..."
    return P("USER_TYPE", args=[Constant(user_type)])
# Create and add the attribute
utype_log_attr = LogAttribute('USER_TYPE', enabled=True, eval_fx=log_user_type,
                              description='The type of the current user.', )
Sysmon.add_log_attribute(utype_log_attr, target=Monitor.MonType.HTTP)
""")
    ###################################
    # Predicates/Functions
    ###################################
    ###################################
    # Rules
    ###################################
    # One Sysmon rule per mapped clause, dispatched on the mapping target.
    for rule in spec.clauses:
        # Default to a-posteriori checking unless the spec says REAL_TIME.
        control_type = "Monitor.MonControlType.POSTERIORI"
        if rule.control_type == "REAL_TIME":
            control_type = "Monitor.MonControlType.REAL_TIME"
        formula = ""
        clause = mm.clause(rule.name)
        if clause is not None:
            formula = aal_clause_to_fodtl(clause)
        description = "Rule for clause %s" % rule.name
        if rule.target == "HTTP":
            http_rules.append('Sysmon.add_http_rule("%s", "%s", \n\tdescription="%s", control_type=%s)'
                              % (rule.name, formula, description, control_type))
        elif rule.target == "VIEW":
            view_rules.append('Sysmon.add_view_rule("%s", "%s", \n\tdescription="%s", control_type=%s)'
                              % (rule.name, formula, description, control_type))
        elif rule.target == "RESPONSE":
            response_rules.append('Sysmon.add_response_rule("%s", "%s", \n\tdescription="%s", control_type=%s)'
                                  % (rule.name, formula, description, control_type))
    ###################################
    # Result
    ###################################
    res = """%s
from accmon.sysmon import *
from django.contrib.auth.models import User
################################
# Custom attributes to log
################################
%s
################################
# Custom predicates/functions
################################
%s
################################
# HTTP request rules
################################
%s
################################
# View rules
################################
%s
################################
# Response rules
################################
%s
""" % (header, "\n".join(log_attributes), "\n".join(custom_predicates), "\n".join(http_rules),
       "\n".join(view_rules), "\n".join(response_rules))
    return res
# Generate django skeleton app
def generate_django_skeleton(aal_file, spec_file, output_folder):
    """
    Generate a Django project pre-wired with accmon/fodtlmon monitoring.
    NOTE : consider using AST modifications for source code
    NOTE(review): the three parameters are effectively ignored -- project
    name/path, app name and even ``spec_file`` are hard-coded just below
    (``spec_file`` is rebound immediately).  Looks like work in progress;
    TODO confirm and honour the arguments.
    :param aal_file:
    :param spec_file:
    :param output_folder:
    :return: a stderr/error string on failure, "Django !" on success
    """
    project_name = "test1"
    project_path = "examples/"
    app_name = "app1"
    spec_file = "tuto2_rules.py"
    # 1. Remove previous project
    if os.path.isdir("examples/%s" % project_name):
        shutil.rmtree("examples/%s" % project_name)
    # 2. Start project
    p = Popen(['django-admin', 'startproject', project_name], stdout=PIPE, stderr=PIPE, stdin=PIPE)
    # res = p.stdout.read().decode("utf-8")
    res = p.stderr.read().decode("utf-8")
    if res != "": return res
    # 3. Create app
    p = Popen(['python3', project_name+'/manage.py', 'startapp', app_name], stdout=PIPE, stderr=PIPE, stdin=PIPE)
    res = p.stderr.read().decode("utf-8")
    if res != "":
        # Rollaback
        if os.path.isdir(project_name): shutil.rmtree(project_name)
        if os.path.isdir(app_name): shutil.rmtree(app_name)
        return res
    # 4. Configure fodtlmon
    # 4.1 wsgi: boot Sysmon, import the rules module and (test only)
    #     create a default superuser on first request.
    wsgi = project_name + "/" + project_name + "/wsgi.py"
    admin = "from django.contrib.auth.models import User\n# Create a superuser (for test only)\n" \
            "if len(User.objects.filter(username='root')) == 0:\n"\
            "\tUser.objects.create_superuser(username='root', password='root', email='')"
    if not os.path.isfile(wsgi):
        return "wsgi file doesn't exists !"
    with open(wsgi, "a+") as f:
        f.write("\nfrom accmon.sysmon import Sysmon\nSysmon.init()\nimport %s.%s\n\n%s\n"
                % (project_name, spec_file.replace(".py", ""), admin))
    # 4.2 settings: register the accmon app, the new app and the
    #     fodtlmon middleware via textual substitution.
    settings = project_name + "/" + project_name + "/settings.py"
    if not os.path.isfile(settings):
        return "settings file doesn't exists !"
    res = ""
    f = open(settings, "r")
    res = f.read()
    res = res.replace("'django.contrib.staticfiles',",
                      "'django.contrib.staticfiles',\n    'accmon',\n    '%s'" % app_name)
    res = res.replace("'django.middleware.security.SecurityMiddleware',",
                      "'django.middleware.security.SecurityMiddleware',\n    'accmon.middleware.FodtlmonMiddleware'")
    f.close()
    f = open(settings, "w")
    f.flush()
    f.write(res)
    f.close()
    # 4.3 urls: mount the accmon monitor UI under /mon/.
    urls = project_name + "/" + project_name + "/urls.py"
    if not os.path.isfile(urls):
        return "urls file doesn't exists !"
    res = ""
    f = open(urls, "r")
    res = f.read()
    res = res.replace("from django.contrib import admin",
                      "from django.contrib import admin\nfrom accmon import urls as fodtlurls")
    res = res.replace("url(r'^admin/', include(admin.site.urls)),",
                      "url(r'^admin/', include(admin.site.urls)),\n    url(r'^mon/', include(fodtlurls.urlpatterns)),")
    f.close()
    f = open(urls, "w")
    f.flush()
    f.write(res)
    f.close()
    # Move app to the project path
    shutil.move(app_name, project_name+"/")
    # Migration
    p = Popen(['python3', project_name+'/manage.py', 'makemigrations'], stdout=PIPE, stderr=PIPE, stdin=PIPE)
    res = p.stderr.read().decode("utf-8")
    if res != "":
        # Rollaback
        if os.path.isdir(project_name): shutil.rmtree(project_name)
        if os.path.isdir(app_name): shutil.rmtree(app_name)
        return res
    p = Popen(['python3', project_name+'/manage.py', 'migrate'], stdout=PIPE, stderr=PIPE, stdin=PIPE)
    res = p.stderr.read().decode("utf-8")
    if res != "":
        # Rollaback
        if os.path.isdir(project_name): shutil.rmtree(project_name)
        if os.path.isdir(app_name): shutil.rmtree(app_name)
        return res
    # Copy the spec file
    shutil.copy(project_path + spec_file, project_name+"/"+project_name+"/"+spec_file.split("/")[-1])
    # Move project
    shutil.move(project_name, project_path)
    return "Django !"
def aal_clause_to_fodtl(clause: m_clause):
    """
    Transform an AAL clause into a fodtl formula (rendered as a string).
    The clause's AST is walked recursively by ``transform``; the result is
    wrapped in a top-level globally operator ``G(...)``.
    :param clause: the m_clause to translate
    :return: the fodtl formula as a string
    """
    # TODO handle rectification
    def transform(exp: aalmmnode, ref=False):
        # ref=True renders variables/agents as bare term names (used for
        # arguments inside predicates/actions) instead of quantified forms.
        if isinstance(exp, m_clause):
            return transform(exp.usage)
        elif isinstance(exp, m_usage):
            return transform(exp.actionExp[0])
        elif isinstance(exp, m_aexpQvar):
            # Nest one quantifier application per quantified variable.
            qs = ""
            for x in exp.qvars:
                qs += transform(x) + "("
            return "%s %s %s" % (qs, transform(exp.actionExp), (")"*(len(exp.qvars))))
        elif isinstance(exp, m_qvar):
            if ref:
                return "%s" % exp.variable.name
            else:
                return "%s[%s]" % (exp.quant.to_ltl(), transform(exp.variable, ref=ref))
        elif isinstance(exp, m_ref):
            return transform(exp.target, ref=ref)
        elif isinstance(exp, m_aexpComb):
            return "(%s %s %s)" % (transform(exp.actionExp1), transform(exp.operator), transform(exp.actionExp2))
        elif isinstance(exp, m_aexpIfthen):
            return "((%s) => (%s))" % (transform(exp.condition), transform(exp.branchTrue))
        elif isinstance(exp, m_booleanOp):
            # AAL boolean operators -> fodtl connectives.
            if exp == m_booleanOp.O_and: return "&"
            elif exp == m_booleanOp.O_or: return "|"
            elif exp == m_booleanOp.O_not: return "~"
            elif exp == m_booleanOp.O_true: return "true"
            elif exp == m_booleanOp.O_false: return "false"
            else: return "<Unsupported boolean op %s>" % exp
        elif isinstance(exp, m_aexpCondition):
            return transform(exp.condition)
        elif isinstance(exp, m_conditionCmp):
            # (In)equality becomes the EQUAL predicate over bare terms.
            if exp.operator == m_booleanOp.O_equal:
                return "EQUAL(%s,%s)" % (transform(exp.exp1, ref=True), transform(exp.exp2, ref=True))
            elif exp.operator == m_booleanOp.O_inequal:
                return "~EQUAL(%s,%s)" % (transform(exp.exp1, ref=True), transform(exp.exp2, ref=True))
            else:
                return "(%s %s %s)" % (transform(exp.exp1), transform(exp.operator), transform(exp.exp2))
        elif isinstance(exp, m_conditionNotComb):
            return "(%s)" % (transform(exp.exp)) if exp.operator is None else "~(%s)" % (transform(exp.exp))
        elif isinstance(exp, m_conditionComb):
            return "(%s %s %s)" % (transform(exp.cond1), transform(exp.operator), transform(exp.cond2))
        elif isinstance(exp, m_predicate):
            # Quoted string arguments are re-escaped as raw strings so the
            # generated formula survives being embedded in Python source.
            q = []
            for x in exp.args:
                x = str(x)
                if x[0] == '"' and x[-1] == '"':
                    x = 'r\\"%s\\"' % x[1:-1]
                q.append(str(x))
            return "%s(%s)" % (exp.name, ", ".join(q))
        elif isinstance(exp, m_aexpModal):
            return "%s(%s)" % (transform(exp.modality), transform(exp.actionExpression))
        elif isinstance(exp, m_modal):
            # AAL modalities -> LTL temporal operators.
            if exp == m_modal.T_always: return "G"
            elif exp == m_modal.T_must: return "F"
            elif exp == m_modal.T_unless: return "R"
            elif exp == m_modal.T_until: return "U"
            elif exp == m_modal.T_sometime: return "F"
            else: return "<Unsupported boolean op %s>" % exp
        elif isinstance(exp, m_aexpAuthor):
            return "%s%s" % (transform(exp.author), transform(exp.action))
        elif isinstance(exp, m_aexpAction):
            return "%s" % transform(exp.action)
        elif isinstance(exp, m_author):
            # PERMIT -> P, DENY -> ~P.
            return "P" if exp == m_author.A_permit else "~P"
        elif isinstance(exp, m_action):
            return "%s(%s, %s, %s)" % (exp.service, transform(exp.agent1, ref=True),
                                       transform(exp.agent2, ref=True), transform(exp.args, ref=True))
        elif isinstance(exp, m_agent):
            return "'%s'" % exp.name
        elif isinstance(exp, m_varAttr):
            return "%s(%s)" % (exp.attribute, exp.variable)
        elif isinstance(exp, m_variable):
            if ref:
                return "%s" % exp.name
            else:
                return "%s" % exp
        elif isinstance(exp, m_constant):
            return "%s" % exp.name
        elif isinstance(exp, str):
            return exp
        else:
            return "<Unsupported type %s>" % exp.__class__.__name__
    return "G(%s)" % transform(clause)
| gpl-3.0 | 1,310,629,162,326,871,600 | 33.984252 | 119 | 0.550379 | false | 3.720067 | false | false | false |
bnaul/scikit-learn | sklearn/preprocessing/_discretization.py | 3 | 13192 | # -*- coding: utf-8 -*-
# Author: Henry Lin <[email protected]>
# Tom Dupré la Tour
# License: BSD
import numbers
import numpy as np
import warnings
from . import OneHotEncoder
from ..base import BaseEstimator, TransformerMixin
from ..utils.validation import check_array
from ..utils.validation import check_is_fitted
from ..utils.validation import _deprecate_positional_args
class KBinsDiscretizer(TransformerMixin, BaseEstimator):
    """
    Bin continuous data into intervals.

    Read more in the :ref:`User Guide <preprocessing_discretization>`.

    .. versionadded:: 0.20

    Parameters
    ----------
    n_bins : int or array-like, shape (n_features,) (default=5)
        The number of bins to produce. Raises ValueError if ``n_bins < 2``.

    encode : {'onehot', 'onehot-dense', 'ordinal'}, (default='onehot')
        Method used to encode the transformed result.

        onehot
            Encode the transformed result with one-hot encoding
            and return a sparse matrix. Ignored features are always
            stacked to the right.
        onehot-dense
            Encode the transformed result with one-hot encoding
            and return a dense array. Ignored features are always
            stacked to the right.
        ordinal
            Return the bin identifier encoded as an integer value.

    strategy : {'uniform', 'quantile', 'kmeans'}, (default='quantile')
        Strategy used to define the widths of the bins.

        uniform
            All bins in each feature have identical widths.
        quantile
            All bins in each feature have the same number of points.
        kmeans
            Values in each bin have the same nearest center of a 1D k-means
            cluster.

    dtype : {np.float32, np.float64}, default=None
        The desired data-type for the output. If None, output dtype is
        consistent with input dtype. Only np.float32 and np.float64 are
        supported.

    Attributes
    ----------
    n_bins_ : int array, shape (n_features,)
        Number of bins per feature. Bins whose width are too small
        (i.e., <= 1e-8) are removed with a warning.

    bin_edges_ : array of arrays, shape (n_features, )
        The edges of each bin. Contain arrays of varying shapes ``(n_bins_, )``
        Ignored features will have empty arrays.

    See Also
    --------
    sklearn.preprocessing.Binarizer : Class used to bin values as ``0`` or
        ``1`` based on a parameter ``threshold``.

    Notes
    -----
    In bin edges for feature ``i``, the first and last values are used only for
    ``inverse_transform``. During transform, bin edges are extended to::

      np.concatenate([-np.inf, bin_edges_[i][1:-1], np.inf])

    You can combine ``KBinsDiscretizer`` with
    :class:`~sklearn.compose.ColumnTransformer` if you only want to preprocess
    part of the features.

    ``KBinsDiscretizer`` might produce constant features (e.g., when
    ``encode = 'onehot'`` and certain bins do not contain any data).
    These features can be removed with feature selection algorithms
    (e.g., :class:`~sklearn.feature_selection.VarianceThreshold`).

    Examples
    --------
    >>> X = [[-2, 1, -4, -1],
    ...      [-1, 2, -3, -0.5],
    ...      [ 0, 3, -2, 0.5],
    ...      [ 1, 4, -1, 2]]
    >>> est = KBinsDiscretizer(n_bins=3, encode='ordinal', strategy='uniform')
    >>> est.fit(X)
    KBinsDiscretizer(...)
    >>> Xt = est.transform(X)
    >>> Xt  # doctest: +SKIP
    array([[ 0., 0., 0., 0.],
           [ 1., 1., 1., 0.],
           [ 2., 2., 2., 1.],
           [ 2., 2., 2., 2.]])

    Sometimes it may be useful to convert the data back into the original
    feature space. The ``inverse_transform`` function converts the binned
    data into the original feature space. Each value will be equal to the mean
    of the two bin edges.

    >>> est.bin_edges_[0]
    array([-2., -1., 0., 1.])
    >>> est.inverse_transform(Xt)
    array([[-1.5, 1.5, -3.5, -0.5],
           [-0.5, 2.5, -2.5, -0.5],
           [ 0.5, 3.5, -1.5, 0.5],
           [ 0.5, 3.5, -1.5, 1.5]])
    """

    @_deprecate_positional_args
    def __init__(self, n_bins=5, encode='onehot', strategy='quantile',
                 dtype=None):
        self.n_bins = n_bins
        self.encode = encode
        self.strategy = strategy
        self.dtype = dtype

    def fit(self, X, y=None):
        """
        Fit the estimator.

        Parameters
        ----------
        X : numeric array-like, shape (n_samples, n_features)
            Data to be discretized.

        y : None
            Ignored. This parameter exists only for compatibility with
            :class:`~sklearn.pipeline.Pipeline`.

        Returns
        -------
        self
        """
        X = self._validate_data(X, dtype='numeric')

        # Resolve the output dtype: an explicit float32/float64 wins,
        # otherwise the input's dtype is kept.
        supported_dtype = (np.float64, np.float32)
        if self.dtype in supported_dtype:
            output_dtype = self.dtype
        elif self.dtype is None:
            output_dtype = X.dtype
        else:
            raise ValueError(
                f"Valid options for 'dtype' are "
                f"{supported_dtype + (None,)}. Got dtype={self.dtype} "
                f" instead."
            )

        valid_encode = ('onehot', 'onehot-dense', 'ordinal')
        if self.encode not in valid_encode:
            raise ValueError("Valid options for 'encode' are {}. "
                             "Got encode={!r} instead."
                             .format(valid_encode, self.encode))
        valid_strategy = ('uniform', 'quantile', 'kmeans')
        if self.strategy not in valid_strategy:
            raise ValueError("Valid options for 'strategy' are {}. "
                             "Got strategy={!r} instead."
                             .format(valid_strategy, self.strategy))

        n_features = X.shape[1]
        n_bins = self._validate_n_bins(n_features)

        # Compute bin edges independently for every feature.
        bin_edges = np.zeros(n_features, dtype=object)
        for jj in range(n_features):
            column = X[:, jj]
            col_min, col_max = column.min(), column.max()

            # A constant feature cannot be discretized; it collapses to a
            # single all-covering bin and a warning.
            if col_min == col_max:
                warnings.warn("Feature %d is constant and will be "
                              "replaced with 0." % jj)
                n_bins[jj] = 1
                bin_edges[jj] = np.array([-np.inf, np.inf])
                continue

            if self.strategy == 'uniform':
                bin_edges[jj] = np.linspace(col_min, col_max, n_bins[jj] + 1)
            elif self.strategy == 'quantile':
                quantiles = np.linspace(0, 100, n_bins[jj] + 1)
                bin_edges[jj] = np.asarray(np.percentile(column, quantiles))
            elif self.strategy == 'kmeans':
                from ..cluster import KMeans  # fixes import loops

                # Deterministic initialization with uniform spacing
                uniform_edges = np.linspace(col_min, col_max, n_bins[jj] + 1)
                init = (uniform_edges[1:] + uniform_edges[:-1])[:, None] * 0.5

                # 1D k-means procedure
                km = KMeans(n_clusters=n_bins[jj], init=init, n_init=1)
                centers = km.fit(column[:, None]).cluster_centers_[:, 0]
                # Must sort, centers may be unsorted even with sorted init
                centers.sort()
                # Edges are midpoints between consecutive cluster centers,
                # framed by the column's min and max.
                bin_edges[jj] = (centers[1:] + centers[:-1]) * 0.5
                bin_edges[jj] = np.r_[col_min, bin_edges[jj], col_max]

            # Remove bins whose width are too small (i.e., <= 1e-8)
            if self.strategy in ('quantile', 'kmeans'):
                mask = np.ediff1d(bin_edges[jj], to_begin=np.inf) > 1e-8
                bin_edges[jj] = bin_edges[jj][mask]
                if len(bin_edges[jj]) - 1 != n_bins[jj]:
                    warnings.warn('Bins whose width are too small (i.e., <= '
                                  '1e-8) in feature %d are removed. Consider '
                                  'decreasing the number of bins.' % jj)
                    n_bins[jj] = len(bin_edges[jj]) - 1

        self.bin_edges_ = bin_edges
        self.n_bins_ = n_bins

        if 'onehot' in self.encode:
            self._encoder = OneHotEncoder(
                categories=[np.arange(i) for i in self.n_bins_],
                sparse=self.encode == 'onehot',
                dtype=output_dtype)
            # Fit the OneHotEncoder with toy datasets
            # so that it's ready for use after the KBinsDiscretizer is fitted
            self._encoder.fit(np.zeros((1, len(self.n_bins_))))

        return self

    def _validate_n_bins(self, n_features):
        """Returns n_bins_, the number of bins per feature.
        """
        orig_bins = self.n_bins
        # A scalar is validated and broadcast to every feature.
        if isinstance(orig_bins, numbers.Number):
            if not isinstance(orig_bins, numbers.Integral):
                raise ValueError("{} received an invalid n_bins type. "
                                 "Received {}, expected int."
                                 .format(KBinsDiscretizer.__name__,
                                         type(orig_bins).__name__))
            if orig_bins < 2:
                raise ValueError("{} received an invalid number "
                                 "of bins. Received {}, expected at least 2."
                                 .format(KBinsDiscretizer.__name__, orig_bins))
            return np.full(n_features, orig_bins, dtype=int)

        # Otherwise n_bins must be a 1D array with one entry per feature.
        n_bins = check_array(orig_bins, dtype=int, copy=True,
                             ensure_2d=False)

        if n_bins.ndim > 1 or n_bins.shape[0] != n_features:
            raise ValueError("n_bins must be a scalar or array "
                             "of shape (n_features,).")

        # "n_bins != orig_bins" also flags non-integral entries that were
        # truncated by the int cast above.
        bad_nbins_value = (n_bins < 2) | (n_bins != orig_bins)

        violating_indices = np.where(bad_nbins_value)[0]
        if violating_indices.shape[0] > 0:
            indices = ", ".join(str(i) for i in violating_indices)
            raise ValueError("{} received an invalid number "
                             "of bins at indices {}. Number of bins "
                             "must be at least 2, and must be an int."
                             .format(KBinsDiscretizer.__name__, indices))
        return n_bins

    def transform(self, X):
        """
        Discretize the data.

        Parameters
        ----------
        X : numeric array-like, shape (n_samples, n_features)
            Data to be discretized.

        Returns
        -------
        Xt : numeric array-like or sparse matrix
            Data in the binned space.
        """
        check_is_fitted(self)

        # check input and attribute dtypes
        dtype = (np.float64, np.float32) if self.dtype is None else self.dtype
        Xt = check_array(X, copy=True, dtype=dtype)

        n_features = self.n_bins_.shape[0]
        if Xt.shape[1] != n_features:
            raise ValueError("Incorrect number of features. Expecting {}, "
                             "received {}.".format(n_features, Xt.shape[1]))

        bin_edges = self.bin_edges_
        for jj in range(Xt.shape[1]):
            # Values which are close to a bin edge are susceptible to numeric
            # instability. Add eps to X so these values are binned correctly
            # with respect to their decimal truncation. See documentation of
            # numpy.isclose for an explanation of ``rtol`` and ``atol``.
            rtol = 1.e-5
            atol = 1.e-8
            eps = atol + rtol * np.abs(Xt[:, jj])
            # Digitize against the interior edges only; the outermost edges
            # effectively act as -inf/+inf during transform.
            Xt[:, jj] = np.digitize(Xt[:, jj] + eps, bin_edges[jj][1:])
        # Values equal to the right-most edge would land in one bin too far;
        # clip all indices back into [0, n_bins - 1].
        np.clip(Xt, 0, self.n_bins_ - 1, out=Xt)

        if self.encode == 'ordinal':
            return Xt

        dtype_init = None
        if 'onehot' in self.encode:
            dtype_init = self._encoder.dtype
            self._encoder.dtype = Xt.dtype
        try:
            Xt_enc = self._encoder.transform(Xt)
        finally:
            # revert the initial dtype to avoid modifying self.
            self._encoder.dtype = dtype_init
        return Xt_enc

    def inverse_transform(self, Xt):
        """
        Transform discretized data back to original feature space.

        Note that this function does not regenerate the original data
        due to discretization rounding.

        Parameters
        ----------
        Xt : numeric array-like, shape (n_sample, n_features)
            Transformed data in the binned space.

        Returns
        -------
        Xinv : numeric array-like
            Data in the original feature space.
        """
        check_is_fitted(self)

        if 'onehot' in self.encode:
            Xt = self._encoder.inverse_transform(Xt)

        Xinv = check_array(Xt, copy=True, dtype=(np.float64, np.float32))
        n_features = self.n_bins_.shape[0]
        if Xinv.shape[1] != n_features:
            raise ValueError("Incorrect number of features. Expecting {}, "
                             "received {}.".format(n_features, Xinv.shape[1]))

        for jj in range(n_features):
            # Each bin index is inverted to the midpoint of its two edges.
            bin_edges = self.bin_edges_[jj]
            bin_centers = (bin_edges[1:] + bin_edges[:-1]) * 0.5
            Xinv[:, jj] = bin_centers[np.int_(Xinv[:, jj])]

        return Xinv
| bsd-3-clause | 1,213,273,688,610,247,700 | 36.157746 | 79 | 0.54052 | false | 3.964833 | false | false | false |
lsblakk/koc_khan_offline | code/iafeedmanager.py | 1 | 1033 | #! /usr/bin/env python
import urllib
from xml.dom import minidom, Node
import json
#1)get daa from internet archive in json format 2)clean the json to get identifier, format, title, date, and description
ka_archive_json = 'http://www.archive.org/advancedsearch.php?q=collection%3A%22khanacademy%22&fl%5B%5D=collection&fl%5B%5D=date&fl%5B%5D=description&fl%5B%5D=format&fl%5B%5D=identifier&fl%5B%5D=publicdate&fl%5B%5D=title&sort%5B%5D=&sort%5B%5D=&sort%5B%5D=&rows=2735&page=1&output=json'
json_dictionary = json.load(urllib.urlopen(ka_archive_json))
docs = json_dictionary['response']['docs']
clean = {}
for doc in docs:
identifier = doc['identifier']
clean[identifier] = {}
clean[identifier]['format'] = doc['format']
clean[identifier]['title'] = doc['title']
clean[identifier]['publicdate'] = doc['publicdate']
if doc.has_key('description'):
clean[identifier]['description'] = doc['description']
else:
clean[identifier]['description'] = "No decsription available"
print clean
print len(clean)
| bsd-3-clause | -2,656,630,267,411,001,300 | 29.382353 | 285 | 0.7212 | false | 2.959885 | false | false | false |
cihatix/FuzzManager | server/ec2spotmanager/management/commands/start_stats_daemon.py | 3 | 4429 | from django.core.management.base import NoArgsCommand
from ec2spotmanager.models import PoolUptimeDetailedEntry, PoolUptimeAccumulatedEntry, InstancePool, Instance, INSTANCE_STATE
from django.conf import settings
from ec2spotmanager.management.common import pid_lock_file
import time
import logging
from django.utils import timezone
from django.db.models.query_utils import Q
stats_delta_secs = 60*15 # Length of one detailed sampling window: 15 minutes (the old "30 minutes" comment was wrong)
stats_total_detailed = 24 # How many hours the detailed statistics should include
stats_total_accumulated = 30 # How many days should we keep accumulated statistics
class Command(NoArgsCommand):
    # NOTE(review): the previous help text ("Check the status of all bugs we
    # have") was copy-pasted from another command; this one collects uptime
    # statistics for EC2 spot instance pools.
    help = "Periodically collect and aggregate uptime statistics for all enabled instance pools"

    @pid_lock_file("stats_daemon")
    def handle_noargs(self, **options):
        """Daemon entry point: refresh pool statistics once a minute, forever.

        The leftover debug ``print(options)`` from the original code has been
        removed.
        """
        while True:
            self.check_instance_pools()
            time.sleep(60)

    def check_instance_pools(self):
        """Update detailed, aggregated and expired statistics for every
        enabled instance pool."""
        for pool in InstancePool.objects.all():
            if not pool.isEnabled:
                continue
            self._update_detailed_entry(pool)
            self._aggregate_detailed_entries(pool)
            self._expire_accumulated_entries(pool)

    def _update_detailed_entry(self, pool):
        """Create or update the detailed uptime entry for the current
        ``stats_delta_secs`` window of *pool*."""
        # NOTE(review): timezone.datetime.now() yields a naive local datetime;
        # if USE_TZ is enabled this should probably be timezone.now() --
        # confirm against the model's 'created' field.
        current_delta = timezone.datetime.now() - timezone.timedelta(seconds=stats_delta_secs)
        entries = PoolUptimeDetailedEntry.objects.filter(pool=pool, created__gte=current_delta)

        # We should never have more than one entry per time-delta
        assert len(entries) < 2

        if entries:
            current_delta_entry = entries[0]
        else:
            # Create a new entry
            current_delta_entry = PoolUptimeDetailedEntry()
            current_delta_entry.pool = pool
            current_delta_entry.target = pool.config.flatten().size

        # Count pending/running instances; count() lets the database do the
        # counting instead of materializing every row (the original used
        # len(...) on the queryset).
        actual = Instance.objects.filter(pool=pool).filter(
            Q(status_code=INSTANCE_STATE['pending']) | Q(status_code=INSTANCE_STATE['running'])).count()

        # Record the minimum observed within the window. "is None" replaces
        # the original "== None" identity-comparison anti-pattern.
        if current_delta_entry.actual is None or actual < current_delta_entry.actual:
            current_delta_entry.actual = actual

        # This will only save if necessary, i.e. if the entry already existed
        # and the values have not changed, this will not cause I/O on the
        # database with Django >=1.5
        current_delta_entry.save()

    def _aggregate_detailed_entries(self, pool):
        """Fold detailed entries older than ``stats_total_detailed`` hours
        into per-day accumulated entries, then delete them."""
        entries = PoolUptimeDetailedEntry.objects.filter(pool=pool).order_by('created')
        # Floor division keeps the count an int on both Python 2 and 3;
        # a float here would break the slice below.
        n = len(entries) - (stats_total_detailed * 60 * 60) // stats_delta_secs
        if n <= 0:
            return

        for entry in entries[0:n]:
            # Figure out if we have an aggregated entry already with the same date
            day_entries = PoolUptimeAccumulatedEntry.objects.filter(pool=pool, created__contains=entry.created.date())

            # We should never have more than one entry per day
            assert len(day_entries) < 2

            if day_entries:
                day_entry = day_entries[0]
            else:
                day_entry = PoolUptimeAccumulatedEntry()
                day_entry.pool = pool
                day_entry.created = entry.created
                day_entry.uptime_percentage = 0.0

            # Extend the running average of the day with the new sample.
            entry_percentage = (float(entry.actual) / entry.target) * 100
            new_uptime_percentage = ((float(day_entry.uptime_percentage) * day_entry.accumulated_count) + entry_percentage) / (day_entry.accumulated_count + 1)

            day_entry.uptime_percentage = new_uptime_percentage
            day_entry.accumulated_count = day_entry.accumulated_count + 1
            day_entry.save()

            # We can now delete our entry
            entry.delete()

    def _expire_accumulated_entries(self, pool):
        """Delete accumulated entries beyond the ``stats_total_accumulated``
        day retention limit, oldest first."""
        entries = PoolUptimeAccumulatedEntry.objects.filter(pool=pool).order_by('created')
        n = len(entries) - stats_total_accumulated
        if n > 0:
            for entry in entries[0:n]:
                entry.delete()
AlexOugh/horizon | openstack_dashboard/api/nikola/nikola_api.py | 1 | 3084 |
#import logging
import httplib, urllib
import simplejson as json
class NikolaAPI():
def _get_headers(self):
#if self.cookie is None: raise Exception, "no cookie available"
headers = {'content-type':'text/plain'}
#headers['cookie'] = self.cookie
return headers
def _request_get(self, server, url, params):
try:
conn = httplib.HTTPConnection(server)
headers = self._get_headers()
task_url = '%s?%s' % (url, urllib.urlencode(params))
conn.request("GET", task_url, headers=headers)
response = conn.getresponse()
content = response.read()
#print "######"
#print content
'''res_json = json.loads(content).values()[0]
if 'errorcode' in res_json:
raise Exception, res_json['errortext']
if ret_attr_name:
if ret_attr_name in res_json:
return res_json[ret_attr_name]
else:
return []'''
res_json = json.loads(content)
return res_json
except Exception, ex:
print ex
raise ex
def _request_post(self, server, url, param_json):
try:
conn = httplib.HTTPConnection(server)
headers = self._get_headers()
if param_json:
if headers:
conn.request("POST", url, json.dumps(param_json), headers=headers)
else:
conn.request("POST", url, json.dumps(param_json))
else:
if headers:
conn.request("POST", url, headers=headers)
else:
conn.request("POST", url)
response = conn.getresponse()
content = response.read()
#print "######"
#print content
res_json = json.loads(content)
return res_json
except Exception, ex:
print ex
raise ex
def send(self, host='172.20.1.14:3000', url='/useast1/nikola/r2/', method='POST', data=None):
request_type = method
if data:
params = json.loads(data)
#print params
else:
params = None
if params:
for key in params.keys():
type_name = type(params[key]).__name__
if type_name == 'dict' or type_name == 'list':
params[key] = json.dumps(params[key])
elif type_name != 'str':
params[key] = str(params[key])
#params = json.dumps(params)
#print params
if request_type == 'POST':
ret = self._request_post(host, url, params)
else:
ret = self._request_get(host, url, params)
#ret = self._request(host, url, request_type, data)
#print ret
return {'result':ret}
| apache-2.0 | -8,247,911,462,868,706,000 | 31.16129 | 97 | 0.475357 | false | 4.515373 | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.