# I-sektionen/i-portalen :: wsgi/iportalen_django/exchange_portal/models.py (license: MIT)
__author__ = 'Magnus Forzelius & Jesper Lehtonen'
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from utils.validators import liu_id_validator
from tags.models import Tag
import os
import datetime
class Continent(models.Model):
name = models.CharField(max_length=50)
class Meta:
verbose_name = _("Världsdel")
verbose_name_plural = _("Världsdelar")
ordering = ['name']
def __str__(self):
return self.name
class Country(models.Model):
name = models.CharField(max_length=50)
in_continent = models.ForeignKey(Continent, on_delete=models.CASCADE, null=True)
class Meta:
verbose_name = _("Land")
verbose_name_plural = _("Länder")
ordering = ['name']
def __str__(self):
return self.name
    @classmethod
    def get_country_list(cls):
        # ``objects`` is only reachable from the class, so use a classmethod
        return cls.objects.all()
class City(models.Model):
name = models.CharField(max_length=50)
in_country = models.ForeignKey(Country, on_delete=models.CASCADE)
class Meta:
verbose_name = _("Stad")
verbose_name_plural = _("Städer")
ordering = ['name']
def __str__(self):
return self.name
class School(models.Model):
name = models.CharField(max_length=50)
in_city = models.ForeignKey(City, on_delete=models.CASCADE)
freemover = models.BooleanField(default=False)
exchange_with_liu = models.BooleanField(default=False)
class Meta:
verbose_name = _("Skola")
verbose_name_plural = _("Skolor")
ordering = ['name']
def __str__(self):
return self.name
# To add: HP (credits), level,
class Liu_Course(models.Model):
name = models.CharField(max_length=50)
course_code = models.CharField(max_length=20)
liu_hp = models.IntegerField(default=0)
level = models.CharField(max_length=10, default='N/A')
is_compulsary = models.BooleanField(default=False)
class Meta:
verbose_name = _("Liukurs")
verbose_name_plural = _("Liukurser")
def __str__(self):
return self.name
class Exchange_Course(models.Model):
    # Maybe expand this later
TECH_PROFILES = (
('D', "D"),
('E', "E"),
('M', "M"),
('B', "B"),
('S', "S"),
('Övr', "Övr")
)
name = models.CharField(max_length=50)
course_code = models.CharField(max_length=20)
year = models.IntegerField()
technical_profile = models.CharField(verbose_name='Teknisk inriktning', max_length=12, choices=TECH_PROFILES, default='NONE')
in_school = models.ForeignKey(School, on_delete=models.CASCADE)
corresponding_liu_course = models.ForeignKey(Liu_Course, on_delete=models.CASCADE)
credits = models.IntegerField(default=0)
level = models.CharField(max_length=10, default='N/A')
    # Add a "credits" attribute, and add the credits/hp quota in the school model
    # A course can be required in several tech profiles
class Meta:
verbose_name = _("Utlandskurs")
verbose_name_plural = _("Utlandskurser")
ordering = ['name']
def __str__(self):
return self.name
def _file_path(instance, filename):
return os.path.join(
'travel_stories', str(instance.about_school.pk), filename
)
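# Illustrative sketch (the field below is NOT in the original file): _file_path
# has the shape of a Django ``upload_to`` callable, so a hypothetical attachment
# field on Travel_Story could plug it in like this:
#
#     story_file = models.FileField(upload_to=_file_path, null=True, blank=True)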
class Travel_Story(models.Model):
TERM_CHOICES = (
("HT", "HT"),
("VT", "VT"),
("Helår", "Helår")
)
YEAR_CHOICES = []
for y in range(1969, (datetime.datetime.now().year + 1)):
YEAR_CHOICES.append((y, y))
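    # i.e. YEAR_CHOICES becomes [(1969, 1969), (1970, 1970), ..., (current_year,
    # current_year)], evaluated once when the class body is executed.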
#Change from file to form
about_school = models.ForeignKey(School, on_delete=models.CASCADE)
added_by_user = models.CharField(verbose_name=_("liu-id"), max_length=10)
    term_abroad = models.CharField(verbose_name=_("termin utomlands"), help_text="Termin du var utomlands", max_length=5, choices=TERM_CHOICES)
    year_abroad = models.IntegerField(verbose_name=_("tid utomlands"), help_text="År när du var utomlands", choices=YEAR_CHOICES)
headline = models.CharField(
verbose_name=_("rubrik"),
max_length=255,
help_text=_("Rubriken till reseberättelsen"))
lead = models.TextField(
verbose_name=_("ingress"),
help_text=_("Ingressen är den text som syns i reseberättelse"))
prep_text = models.TextField(
verbose_name=_("förberedelser"),
help_text=_("Var det några särskilda förberedelser som krävdes?
Har du några generella tips gällande ansökan? Visum?"),
null=True,
blank=True)
location_text = models.TextField(
verbose_name=_("landet och staden"),
help_text=_("Hur upplevdes landet? Staden? Kultur? Billigt eller dyrt?"),
null=True,
blank=True)
school_text = models.TextField(
verbose_name=_("skolan"),
help_text=_("Geografisk placering i staden?
Hur var campus?
Var det lätt att träffa lokalbefolkning?
Hur var studentlivet? Kurser: var det lätt/svårt att få kurser? Var de lätta/svåra att få tillgodoräknade?"),
null=True,
blank=True)
studies_text = models.TextField(
verbose_name=_("studier"),
help_text=_("Hur var nivån på kurserna?
Råd angående att välja kurser på plats?
Svårt att hitta kurser på engelska?
Hur var språket? (framförallt för de som läser ii eller som inte läste på engelska)"),
null=True,
blank=True)
living_text = models.TextField(
verbose_name=_("boende"),
help_text=_("Hur bodde du?
Hur hittade du ditt boende? Tips på eventuell mäklare eller liknande? Vilka alternativ finns?
Priser och standard?
"),
null=True,
blank=True)
sparetime_text = models.TextField(
verbose_name=_("fritid"),
help_text=_("Vad gör man på fritiden?
Resor?
Tips på saker man inte får missa"),
null=True,
blank=True)
other_text = models.TextField(
verbose_name=_("övrigt"),
help_text=_("Brödtext syns när en reseberättelse visas enskilt."),
null=True,
blank=True)
#tags = models.ManyToManyField(
# Tag,
# verbose_name=_("tag"),
# blank=True,
# help_text=_("Håll ner Ctrl för att markera flera."))
class Meta:
verbose_name = _("Reseberättelse")
verbose_name_plural = _("Reseberättelser")
        ordering = ['-year_abroad', 'term_abroad']
def __str__(self):
return self.headline
class Feedback(models.Model):
message = models.CharField(max_length=500)
def __str__(self):
return self.message
#def get_absolute_url(self):
# """Get url of object"""
# return reverse(self)
#self.about_school
# citrix-openstack-build/neutron :: neutron/plugins/ml2/rpc.py (license: Apache-2.0)
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import constants as q_const
from neutron.common import rpc as q_rpc
from neutron.common import topics
from neutron.db import agents_db
from neutron.db import api as db_api
from neutron.db import dhcp_rpc_base
from neutron.db import securitygroups_rpc_base as sg_db_rpc
from neutron import manager
from neutron.openstack.common import log
from neutron.openstack.common.rpc import proxy
from neutron.plugins.ml2 import db
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_tunnel
# REVISIT(kmestery): Allow the type and mechanism drivers to supply the
# mixins and eventually remove the direct dependencies on type_tunnel.
LOG = log.getLogger(__name__)
TAP_DEVICE_PREFIX = 'tap'
TAP_DEVICE_PREFIX_LENGTH = 3
class RpcCallbacks(dhcp_rpc_base.DhcpRpcCallbackMixin,
sg_db_rpc.SecurityGroupServerRpcCallbackMixin,
type_tunnel.TunnelRpcCallbackMixin):
RPC_API_VERSION = '1.1'
# history
# 1.0 Initial version (from openvswitch/linuxbridge)
# 1.1 Support Security Group RPC
def __init__(self, notifier, type_manager):
# REVISIT(kmestery): This depends on the first three super classes
# not having their own __init__ functions. If an __init__() is added
# to one, this could break. Fix this and add a unit test to cover this
# test in H3.
super(RpcCallbacks, self).__init__(notifier, type_manager)
def create_rpc_dispatcher(self):
'''Get the rpc dispatcher for this manager.
If a manager would like to set an rpc API version, or support more than
one class as the target of rpc messages, override this method.
'''
return q_rpc.PluginRpcDispatcher([self,
agents_db.AgentExtRpcCallback()])
@classmethod
def _device_to_port_id(cls, device):
# REVISIT(rkukura): Consider calling into MechanismDrivers to
# process device names, or having MechanismDrivers supply list
# of device prefixes to strip.
if device.startswith(TAP_DEVICE_PREFIX):
return device[TAP_DEVICE_PREFIX_LENGTH:]
else:
return device
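    # Example (illustrative): _device_to_port_id("tap3f1a2b4c") returns
    # "3f1a2b4c"; a device name without the "tap" prefix passes through unchanged.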
@classmethod
def get_port_from_device(cls, device):
port_id = cls._device_to_port_id(device)
port = db.get_port_and_sgs(port_id)
if port:
port['device'] = device
return port
def get_device_details(self, rpc_context, **kwargs):
"""Agent requests device details."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s details requested by agent "
"%(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port_id = self._device_to_port_id(device)
session = db_api.get_session()
with session.begin(subtransactions=True):
port = db.get_port(session, port_id)
if not port:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s not found in database"),
{'device': device, 'agent_id': agent_id})
return {'device': device}
segments = db.get_network_segments(session, port.network_id)
if not segments:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s has network %(network_id)s with "
"no segments"),
{'device': device,
'agent_id': agent_id,
'network_id': port.network_id})
return {'device': device}
binding = db.ensure_port_binding(session, port.id)
if not binding.segment:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s on network %(network_id)s not "
"bound, vif_type: %(vif_type)s"),
{'device': device,
'agent_id': agent_id,
'network_id': port.network_id,
'vif_type': binding.vif_type})
return {'device': device}
segment = self._find_segment(segments, binding.segment)
if not segment:
LOG.warning(_("Device %(device)s requested by agent "
"%(agent_id)s on network %(network_id)s "
"invalid segment, vif_type: %(vif_type)s"),
{'device': device,
'agent_id': agent_id,
'network_id': port.network_id,
'vif_type': binding.vif_type})
return {'device': device}
new_status = (q_const.PORT_STATUS_BUILD if port.admin_state_up
else q_const.PORT_STATUS_DOWN)
if port.status != new_status:
port.status = new_status
entry = {'device': device,
'network_id': port.network_id,
'port_id': port.id,
'admin_state_up': port.admin_state_up,
'network_type': segment[api.NETWORK_TYPE],
'segmentation_id': segment[api.SEGMENTATION_ID],
'physical_network': segment[api.PHYSICAL_NETWORK]}
LOG.debug(_("Returning: %s"), entry)
return entry
def _find_segment(self, segments, segment_id):
for segment in segments:
if segment[api.ID] == segment_id:
return segment
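        # falls through to an implicit None when no segment id matches;
        # get_device_details() above treats that as an invalid segment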
def update_device_down(self, rpc_context, **kwargs):
"""Device no longer exists on agent."""
# TODO(garyk) - live migration and port status
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s no longer exists at agent "
"%(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port_id = self._device_to_port_id(device)
plugin = manager.NeutronManager.get_plugin()
port_exists = plugin.update_port_status(rpc_context, port_id,
q_const.PORT_STATUS_DOWN)
return {'device': device,
'exists': port_exists}
def update_device_up(self, rpc_context, **kwargs):
"""Device is up on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s up at agent %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port_id = self._device_to_port_id(device)
plugin = manager.NeutronManager.get_plugin()
plugin.update_port_status(rpc_context, port_id,
q_const.PORT_STATUS_ACTIVE)
class AgentNotifierApi(proxy.RpcProxy,
sg_rpc.SecurityGroupAgentRpcApiMixin,
type_tunnel.TunnelAgentRpcApiMixin):
"""Agent side of the openvswitch rpc API.
API version history:
1.0 - Initial version.
1.1 - Added get_active_networks_info, create_dhcp_port,
update_dhcp_port, and removed get_dhcp_port methods.
"""
BASE_RPC_API_VERSION = '1.1'
def __init__(self, topic):
super(AgentNotifierApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self.topic_network_delete = topics.get_topic_name(topic,
topics.NETWORK,
topics.DELETE)
self.topic_port_update = topics.get_topic_name(topic,
topics.PORT,
topics.UPDATE)
def network_delete(self, context, network_id):
self.fanout_cast(context,
self.make_msg('network_delete',
network_id=network_id),
topic=self.topic_network_delete)
def port_update(self, context, port, network_type, segmentation_id,
physical_network):
self.fanout_cast(context,
self.make_msg('port_update',
port=port,
network_type=network_type,
segmentation_id=segmentation_id,
physical_network=physical_network),
topic=self.topic_port_update)
# eXma/meet-and-eat-registration-system :: src/cfg/__init__.py (license: BSD-3-Clause)
import os
from contextlib import contextmanager
import yaml
import locale
from datetime import datetime
__author__ = 'jan'
def parse_cfg_date(cfg_date):
return datetime.strptime(cfg_date, "%Y-%m-%d %H:%M")
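# Example (illustrative): parse_cfg_date("2014-06-21 18:30")
# -> datetime.datetime(2014, 6, 21, 18, 30)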
@contextmanager
def _fix_locale(prefix):
oldloc = locale.getlocale(locale.LC_TIME)
if not oldloc[0] == prefix:
tried = []
for suffx in ("", ".UTF8", ".ISO-8859-1", "@euro"):
tried.append(prefix + suffx)
try:
locale.setlocale(locale.LC_TIME, prefix + suffx)
yield
locale.setlocale(locale.LC_TIME, oldloc)
return
except locale.Error:
pass
raise Exception("Cannot set locale with prefix %s. Tried: %s" % (prefix,
", ".join(tried)))
else:
yield
def pretty_date(date, month_name=False, show_year=False, with_weekday=False):
"""Pretty print the date
:type date: datetime
"""
format = ["%d."]
if month_name:
format.append(" %B ")
else:
format.append("%m.")
if show_year:
format.append("%Y")
if with_weekday:
format = ["%A, den "] + format
with _fix_locale("de_DE"):
pretty = date.strftime("".join(format).strip())
return pretty
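# Example (illustrative, rendered with the de_DE locale):
#     pretty_date(datetime(2014, 6, 21), month_name=True, with_weekday=True)
#     -> "Samstag, den 21. Juni"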
class GlobalConfig(object):
def __init__(self):
self.data = None
def initialize(self, data):
self.data = data
def clear(self):
self.data = None
def loaded(self):
return self.data is not None
def __getattr__(self, item):
assert self.data is not None, "No configuration loaded!"
if item not in self.data:
raise AttributeError, item
return self.data[item]
def __getitem__(self, key):
assert self.data is not None, "No configuration loaded!"
if key not in self.data:
raise KeyError, key
return self.data[key]
config = GlobalConfig()
def load_config(fname=None):
if fname is None:
fname = os.getenv("CONFIG_FILE_PATH", None)
assert fname is not None, "No config file set!"
assert os.path.exists(fname), "Config file %s does not exist" % fname
with open(fname, "r") as fn:
        data = yaml.safe_load(fn)  # safe_load: plain data only, avoids arbitrary object construction
config.initialize(data)
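# Typical usage (illustrative; the path and key are hypothetical):
#
#     os.environ["CONFIG_FILE_PATH"] = "/etc/meet/config.yml"
#     load_config()
#     config.database      # attribute-style access
#     config["database"]   # mapping-style access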
#!/usr/local/bin/python
# DarthMaulware/EquationGroupLeaks :: Leak #4 - Don't Forget Your Base/EQGRP-Auction-File/Linux/bin/fg.py (license: Unlicense)
# VER=2.0.0.2
# 09 FEB 2012
"""
fg UTILITIES
requires:
+ winscp for win32
+ pexpect 2.3 on linux
"""
import re, sys, time, os, getpass, string, traceback
from os import popen
from optparse import OptionParser
from subprocess import *
try:
import pexpect
except:
pass
class fg:
def __init__(self, userLogin, userID, userPassword, server, **kwargs):
"""
Initializes class setup some variables.
fg = fg(userLogin, userID, userPassword, server, kwargs[sharedDIRBool, userDIRBool, diskDIRBool, fileWildcard, debugBool, timeout, privKeyFile])
"""
self.sharedDIRBool = self.userDIRBool = self.diskDIRBool = False
self.fileWildcard = ""
self.debugBool = False
self.timeout = 120
#determine OS
self.platform = sys.platform
if self.debugBool: print "Running on %s" % self.platform
self.userLogin = userLogin
self.userID = userID
self.userPassword = userPassword
self.server = server
self.remoteDir = ""
self.destDir = "."
        self.privKeyFile = None  # overridden below when a key file is supplied
if kwargs.__contains__("sharedDIRBool"):
self.sharedDIRBool = kwargs["sharedDIRBool"]
if self.sharedDIRBool: self.remoteDir = "/data/shared/"
if kwargs.__contains__("userDIRBool"):
self.userDIRBool = kwargs["userDIRBool"]
if self.userDIRBool: self.remoteDir = "/data/users/" + self.userID + "/"
if kwargs.__contains__("diskDIRBool"):
self.diskDIRBool = kwargs["diskDIRBool"]
if self.diskDIRBool: self.remoteDir = "/data/gc/"
if kwargs.__contains__("privKeyFile"):
self.privKeyFile = kwargs["privKeyFile"]
if kwargs.__contains__("fileWildcard"):
self.fileWildcard = kwargs["fileWildcard"]
self.debugBool = kwargs["debugBool"]
self.timeout = int(kwargs["timeout"])
#ask for a password if the user didn't specify one or a privKeyFile
if not self.userPassword and not self.privKeyFile:
self.userPassword = self.setPass()
if not self.userID:
print "USER ID NOT SET!!"
exit(0)
        if self.privKeyFile and not os.path.isfile(self.privKeyFile):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] Key file does not exist: " + self.privKeyFile + bcolors.ENDC + "\n\n"
sys.stdout.flush()
exit(0)
#this is the host key for the server to SSH into, needed for winscp
self.host_key = "ssh-rsa 2048 xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx"
if(self.platform == "linux2"):
self.sshKeys = [
'authenticity',
'assword:',
'denied',
'No such file or directory',
'100%',
'ETA',
pexpect.EOF,
'Permission denied',
'total '
]
self.sftpKeys = [
'authenticity',
'assword:',
'denied',
pexpect.EOF,
'sftp>',
'Connecting to'
]
#--------------------------------
def setPass(self):
"""
Prompts the user for a password if this class was not passed the password by another script
"""
print "\n"
userPassword = getpass.getpass()
if self.debugBool: print "Password set: %s" % (userPassword)
print "\n\n"
return(userPassword)
#--------------------------------
def fgAutoGet(self):
"""
Automatically gets the files. Does a dir, displays the file list, prompts user for all, #, or wildcard get
"""
#if self.debugBool: print "Using options: %s --> %s" % (self.type, self.userLogin)
if(self.platform == "win32"):
# list the files then display them to the user
print "AUTO GET FILES WIN32"
print "===================================="
#cmd = 'cmd.exe /c winscp ' + self.userLogin + ":" + self.userPassword + '@' + self.server + " -hostkey\=\"" + self.host_key + "\" /command \"option confirm off\" \"get " + self.remoteDir + self.fileWildcard + "* " + self.destDir + "\ \" exit \n"
#cmdnopass = 'cmd.exe /c winscp ' + self.userLogin + ":" + "<PASSWORD>" + '@' + self.server + " -hostkey\=\"" + self.host_key + "\" /command \"option confirm off\" \"get " + self.remoteDir + self.fileWildcard + "* " + self.destDir + "\ \" exit \n"
cmd = 'cmd.exe /c winscp ' + "/console /command \"open " + self.userLogin + ":" + self.userPassword + '@' + self.server + "\" \"option confirm off\" \"get " + self.remoteDir + self.fileWildcard + "* " + self.destDir + "\ \" exit" + " -hostkey\=\"" + self.host_key
print cmd
#print "SENDING COMMAND: %s" % cmdnopass
#output = fg.winRunIt(cmd)
#print "\t[+] " + output.strip()
elif(self.platform == "linux2"):
print "AUTO GET FILES LINUX"
additionalArgs=""
#If we need to pass some additional args, do so here
if (self.privKeyFile):
additionalArgs= '-i ' + self.privKeyFile + ' '
if (self.fileWildcard[0]=='^'):
cmd = 'scp ' + str(additionalArgs) + self.userLogin + '@' + self.server + ':' + self.remoteDir + self.fileWildcard.lstrip('^') + "* " + self.destDir
else:
cmd = 'scp ' + str(additionalArgs) + self.userLogin + '@' + self.server + ':' + self.remoteDir + "*" + self.fileWildcard + "* " + self.destDir
print "===================================="
print "\t" + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#--------------------------------
def fgManualGet(self):
"""
Provides the user with a list of files then gets the user selected files.
"""
file_re = re.compile(r"^[drwx-]+\s", re.IGNORECASE | re.VERBOSE)
if(self.platform == "win32"):
#cd into directory then dir
print "====================================\n"
print " SORRY NOT WORKING YET! PUNT!"
exit(0)
#cmd = 'cmd.exe /c winscp ' + self.userLogin + ":" + self.userPassword + '@' + self.server + " -hostkey\=\"" + self.host_key + "\" /command \"cd " + self.remoteDir + "\" dir exit \n"
#output = fg.winRunIt(cmd)
elif(self.platform == "linux2"):
additionalArgs=""
#If we need to pass some additional args, do so here
if (self.privKeyFile):
additionalArgs= '-oIdentityFile=' + self.privKeyFile + ' '
# TODO, implement this with sftp: sftp -oIdentityFile=/root/testKey op@server
sftpCmd = 'sftp ' + str(additionalArgs) + self.userLogin + '@' + self.server
sftpRunCmd='ls -l ' + self.remoteDir
print sftpCmd + " THEN RUNNING " + sftpRunCmd
print "===================================="
try:
#outputChild = fg.sftpRunCmd(sftpCmd,sftpRunCmd, self.sftpKeys)
result = fg.sftpRunCmd(sftpCmd,sftpRunCmd, self.sftpKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#lines = string.split(str(outputChild.before), "\r\n")
#outputChild.close()
#result = string.split(str(outputChild.before), "\r\n")
lines = string.split(str(result), "\r\n")
fileList = {}
print "\t[+] Getting list of files...\n"
for line in lines:
if file_re.match(line):
filename = re.split('\s+', line)
nf = string.strip(filename[len(filename)-1])
nftype = string.strip(filename[0])
if not (nf == "." or nf == ".."):
fileList[nf] = nftype
cnt = 1
keys = fileList.keys()
keys.sort()
fileList2 = {}
for key in keys:
print "\t[%3s] %10s %s" % (cnt, fileList[key], key)
fileList2[cnt] = [key, fileList[key]]
cnt = cnt + 1
if cnt > 1:
print "Please select file(s) to copy: (\"all\" | num,[num...] | part of the filename) q = quit"
filesget = raw_input('-->')
print "====================================\n"
else:
print "NO FILES WAITING! SKIPPING PROMPT!"
filesget = "quit"
if filesget == "q" or filesget == "quit":
exit(0)
elif filesget == "all":
#get all files
for key in keys:
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + key + " " + self.destDir
print "\t[+] " + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\t======="
#get #,# | # #
elif re.match("[0-9\,]+", filesget):
filesget = filesget.replace(", ", ",")
tmpF = re.split(",|\s", filesget)
for i in tmpF:
#catch error when user put in number out of index, or not an INT
if str(i).isdigit() and int(i) <= int(len(keys)):
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + str(fileList2[int(i)][0]) + " " + self.destDir
print "\t[+] " + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\t======="
else:
#raise CustomException("\t[!] BAD USER INPUT FORMAT! - %s, MALFORMED CHARACTER OR INDEX OUT OF BOUNDS!!" % i)
if str(i).isdigit() and int(i) > int(len(keys)):
#try a wildcard get on the file even though it is an integer before bailing out
getFileStr = "*" +str(i) + "*"
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + getFileStr + " " + self.destDir
print "\t[+] " + cmd
try:
#TODO properly handle the output for when this matches multiple files (it works it just doesn't show all the files that got copied)
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "You either entered a number that was invalid or a filename with digits only which apparently wasn't on the server"
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#print bcolors.BOLD + bcolors.FAIL + "\t[!] BAD USER INPUT! <" + str(i) + "> INDEX OUT OF BOUNDS, SKIPPING TO NEXT ONE..." + bcolors.ENDC
#print "\t======="
else:
print bcolors.BOLD + bcolors.FAIL + "\t[!] NO IDEA WHAT YOU DID! <" + str(i) + ">, SKIPPING TO NEXT ONE..." + bcolors.ENDC
print "\t======="
#get filename match
#TODO fixup case where string is given that doesn't match ( ie someone accidentally types filename,1,3 )
elif re.match('\w+', filesget):
for key in keys:
if re.search(filesget, key, re.IGNORECASE | re.VERBOSE):
cmd = "scp " + str(additionalArgs) + self.userLogin + "@" + self.server + ":" + self.remoteDir + key + " " + self.destDir
print "\t[+] " + cmd
try:
outputChild = fg.nixRunIt(cmd, self.sshKeys)
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] " + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\t======="
#This seems to not be needed
#elif (keys=1): #if we get througnall keys and no match:
# print "DEBUGGING key " + key + " keys " + str(keys) + " filesget " + filesget
# raise CustomException("\t[!] FILE MATCH NOT FOUND! - THINK ABOUT WHAT YOU WANT THEN TRY AGAIN!!")
else:
raise CustomException("\t[!] BAD USER INPUT FORMAT! - THINK ABOUT WHAT YOU WANT THEN TRY AGAIN!!")
#--------------------------------
def winRunIt(self, cmd):
"""
Run a command
"""
pass
#print "Running " + cmd
#p1 = Popen(cmd, stdout=PIPE, stderr=PIPE)
#output = p1.communicate()[0]
#erroutput = p1.communicate()[1]
#p1.wait()
#return output
#--------------------------------
def sftpRunCmd(self, sftpConnectCmd, sftpCommand, expectKeys):
child = pexpect.spawn(sftpConnectCmd, timeout=self.timeout,)
seen = child.expect(expectKeys)
workedB = False
printWorkedCNT = 0
cnt = 0
cnt2 = 0
#yup, this is a horrible duplication of code
while seen != 3:
#print "Debugging " + str(child)
cnt = cnt + 1
if printWorkedCNT == 1:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write("\t[+] RUNNING COMMAND [ " + sftpConnectCmd + " ]")
sys.stdout.flush()
#~~~~~~~~~~~~~~~
#authenticty
if seen == 0:
sys.stdout.write("\t[+] ACCEPTING RSA KEY...")
sys.stdout.flush()
child.sendline('yes')
seen = child.expect(expectKeys)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
#assword:
if seen == 1:
child.sendline(self.userPassword)
if cnt2 < 1:
sys.stdout.write("\t[+] AUTHENTICATING WITH SSH SERVER...")
sys.stdout.flush()
else:
if cnt2 == 1:
sys.stdout.write("\r|")
sys.stdout.flush()
if cnt2 == 2:
sys.stdout.write("\r/")
sys.stdout.flush()
if cnt2 == 3:
sys.stdout.write("\r-")
sys.stdout.flush()
if cnt2 == 4:
sys.stdout.write("\r\\")
sys.stdout.flush()
cnt2 = 0
cnt2 = cnt2 + 1
seen = child.expect(expectKeys)
#sftp>
if seen == 4:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
print "Sending command " + sftpCommand
sys.stdout.flush()
child.sendline(sftpCommand)
seen = child.expect(expectKeys)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
workedB = True
#print "DEBUGGING case 4 " + str(child)
result=str(child.before)
#now quit and cleanup
child.sendline("quit")
seen = child.expect(expectKeys)
child.close()
return result
#Connecting to ...
if seen == 5:
print "Connecting to server"
seen = child.expect(expectKeys)
if workedB:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write(bcolors.OKGREEN + "[OK]" + bcolors.ENDC + "\t[+] SESSION COMPLETE!\n")
sys.stdout.flush()
else:
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] CONNECTION ERROR - CHECK IP ADDRESS, USERNAME, OR PASSWORD\n\n"
sys.stdout.flush()
#seen = child.expect(expectKeys)
return(child)
#--------------------------------
def nixRunIt(self, cmd, expectKeys):
"""
Controls Pexpect for
"""
child = pexpect.spawn(cmd, timeout=self.timeout,)
seen = child.expect(expectKeys)
workedB = False
printWorkedCNT = 0
cnt = 0
cnt2 = 0
while seen != 6:
cnt = cnt + 1
if printWorkedCNT == 1:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write("\t[+] RUNNING COMMAND [ " + cmd + " ]")
sys.stdout.flush()
#~~~~~~~~~~~~~~~
#authenticty
if seen == 0:
sys.stdout.write("\t[+] ACCEPTING RSA KEY...")
sys.stdout.flush()
child.sendline('yes')
seen = child.expect(expectKeys)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
#assword:
if seen == 1:
child.sendline(self.userPassword)
if cnt2 < 1:
sys.stdout.write("\t[+] AUTHENTICATING WITH SSH SERVER...")
sys.stdout.flush()
else:
if cnt2 == 1:
sys.stdout.write("\r|")
sys.stdout.flush()
if cnt2 == 2:
sys.stdout.write("\r/")
sys.stdout.flush()
if cnt2 == 3:
sys.stdout.write("\r-")
sys.stdout.flush()
if cnt2 == 4:
sys.stdout.write("\r\\")
sys.stdout.flush()
cnt2 = 0
cnt2 = cnt2 + 1
seen = child.expect(expectKeys)
#denied:
if seen == 2:
workedB = False
child.kill(0)
raise CustomException("ACCESS DENIED! - CHECK USERNAME OR PASSWORD\n\n\t!! IF YOU SEE A DIALOG BOX CLOSE PRESS CANCEL !!")
#'No such file or directory',
if seen == 3:
#workedB = False
child.kill(0)
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
raise CustomException("FILE MATCH NOT FOUND! - MAYBE THERE ARE NO FILES WAITING FOR YOU ON THE SERVER?")
#100%
if seen == 4:
printWorkedCNT = printWorkedCNT + 1
workedB = True
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write("\t")
sys.stdout.flush()
tmpStr = str(child.before)
tmpStr = tmpStr.replace("\r", "")
tmpStr = tmpStr.replace("\d", "")
tmpStr = tmpStr.replace("\n", "")
sys.stdout.write(tmpStr)
sys.stdout.flush()
seen = child.expect(expectKeys)
#ETA
if seen == 5:
printWorkedCNT = printWorkedCNT + 1
workedB = True
if cnt == 1:
sys.stdout.write("\r|")
sys.stdout.flush()
if cnt == 2:
sys.stdout.write("\r/")
sys.stdout.flush()
if cnt == 3:
sys.stdout.write("\r-")
sys.stdout.flush()
if cnt == 4:
sys.stdout.write("\r\\")
sys.stdout.flush()
cnt = 1
seen = child.expect(expectKeys)
#Permission denied
if seen == 7:
workedB = False
child.kill(0)
raise CustomException("ACCESS DENIED! - CHECK USERNAME OR PASSWORD\n\n\t!! IF YOU SEE A DIALOG BOX CLOSE PRESS CANCEL !!")
workedB = True
#total (result from an ls when a key is used versus password authentication)
if seen == 8:
                workedB = True
sys.stdout.write("\t[+] REMOTE LISTING COMPLETE.")
sys.stdout.flush()
seen = child.expect(expectKeys)
if workedB:
sys.stdout.write(bcolors.OKGREEN + "\r[OK]" + bcolors.ENDC + "\n")
sys.stdout.flush()
sys.stdout.write(bcolors.OKGREEN + "[OK]" + bcolors.ENDC + "\t[+] SESSION COMPLETE!\n")
sys.stdout.flush()
else:
print bcolors.BOLD + bcolors.FAIL + "\n\t[!] CONNECTION ERROR - CHECK IP ADDRESS, USERNAME, OR PASSWORD\n\n"
sys.stdout.flush()
#seen = child.expect(expectKeys)
return(child)
#--------------------------------
class CustomException(Exception):
"""
Custom Exceptions...kinda
"""
def __init__(self, value):
self.parameter = value
def __str__(self):
return repr(self.parameter)
#--------------------------------
class bcolors:
"""
Pretty colors on the console
"""
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
def disable(self):
self.HEADER = ''
self.OKBLUE = ''
self.OKGREEN = ''
self.WARNING = ''
self.FAIL = ''
self.BOLD = ''
self.ENDC = ''
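# Usage sketch (illustrative): wrap text in an escape code and reset with ENDC,
# e.g. ``print bcolors.OKGREEN + "[OK]" + bcolors.ENDC``, as done throughout fg.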
#--------------------------------
if(__name__ == "__main__"):
"""
Main
"""
# setup args
    VER = '2.0.0.2'  # keep in sync with the VER tag in the header comment
parser = OptionParser(usage='%prog -l <USERLOGIN> -u <USERID> -p <USERPASS> -s <SERVER> (--sharedDIR|--userDIR|--diskDIR) [-f PART_OF_FILENAME]', add_help_option = True)
#connection info
parser.add_option("-v", dest="versionB", action="store_true", default=False)
parser.add_option("-l", "--LoginUser", dest="userLogin", help="Your server login username")
parser.add_option("-u", "--userID", dest="userID", help="Your user ID number")
parser.add_option("-p", "--pass", dest="userPassword", default=None, help="Your password")
parser.add_option("-s", "--server", dest="server", help="The server to connect to")
#types
parser.add_option("--sharedDIR", dest="sharedDIRBool", action="store_true", default=False, help="Get files from shared directory")
parser.add_option("--userDIR", dest="userDIRBool", action="store_true", default=False, help="Get files from user directory")
parser.add_option("--diskDIR", dest="diskDIRBool", action="store_true", default=False, help="Get files from disk directory")
parser.add_option("-f", "--file", dest="fileWildcard", default=None, help="Get files with this wildcard; REGEX used => .*YOURTEXT.*")
parser.add_option("-i", "--privKeyFile", dest="privKeyFile", default=None, help="Keyfile to use for server authentication")
parser.add_option("--debug", dest="debugBool", action="store_true", default=False, help="Prints more stuff to the screen")
parser.add_option("--timeout", dest="timeout", default=120, help="Overrides the timeout for ssh sessions to server")
(options, sys.argv) = parser.parse_args(sys.argv)
#print "login:" + options.userLogin + "\nuser:" + options.userID + "\npass:" + options.userPassword + "\nserver:" + options.server + "\nshared:" + str(options.sharedDIRBool) + "\nuser:" + str(options.userDIRBool) + "\ndisk:" + str(options.diskDIRBool) + "\nwildcard:" + str(options.fileWildcard) + "\ndebug:" + str(options.debugBool) + "\ntimeout:" + str(options.timeout)
if options.versionB:
print VER
exit(0)
#User must put in one of these options or fail!
if not(options.sharedDIRBool or options.userDIRBool or options.diskDIRBool):
print "\n\n!!! DID NOT SPECIFY TYPE !!!\n\t[--sharedDIR | --userDIR | --diskDIR]\n\n"
exit(0)
try:
fg = fg(options.userLogin, options.userID, options.userPassword, options.server, sharedDIRBool=options.sharedDIRBool, userDIRBool=options.userDIRBool, diskDIRBool=options.diskDIRBool, fileWildcard=options.fileWildcard, debugBool=options.debugBool, timeout=options.timeout, privKeyFile=options.privKeyFile)
except:
print "\n\n!!! FG EXCEPTION !!!\n!!! CHECK USAGE !!!"
print "usage: fg.py -l <USERLOGIN> -u <USERID> -p <USERPASS> -s <SERVER> (--sharedDIR|--userDIR|--diskDIR) [-f PART_OF_FILENAME]\n\n"
try:
raise CustomException("ACCESS DENIED! - CHECK USERNAME OR PASSWORD\n\n\t!! IF YOU SEE A DIALOG BOX CLOSE PRESS CANCEL !!")
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
if options.debugBool: print sys.exc_info()
if options.debugBool: print str(traceback.tb_lineno(sys.exc_traceback))
exit(0)
#shared
if options.sharedDIRBool:
if options.debugBool: print "SHARED!!"
if options.fileWildcard:
print "AUTO GET WITH WILDCARD %s" % options.fileWildcard
try:
fg.fgAutoGet()
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
else:
print "PROMPT USER FILENAMES TO GET"
try:
fg.fgManualGet()
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#user
elif options.userDIRBool:
if options.debugBool: print "USER_DIR!!"
if options.fileWildcard:
print "AUTO GET WITH WILDCARD %s" % options.fileWildcard
try:
fg.fgAutoGet()
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
else:
print "PROMPT USER FILENAMES TO GET"
try:
fg.fgManualGet()
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
#disks
elif options.diskDIRBool:
if options.debugBool: print "DISK!!"
if options.fileWildcard:
print "AUTO GET WITH WILDCARD %s" % options.fileWildcard
try:
fg.fgAutoGet()
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
else:
print "PROMPT USER FILENAMES TO GET"
try:
fg.fgManualGet()
except CustomException, (instance):
print bcolors.BOLD + bcolors.FAIL + instance.parameter + bcolors.ENDC + "\n\n"
exit(0)
print "\n\n\n"
#----------------------------------
| unlicense | 1,231,811,236,877,711,400 | 35.412322 | 372 | 0.613302 | false |
lioupayphone/gdeploy | gdeployfeatures/volume/volume.py | 1 | 12197 | """
Add functions corresponding to each of the actions in the json file.
The function should be named as follows <feature name>_<action_name>
"""
from gdeploylib import defaults, Helpers, Global, YamlWriter
import os, re
from os.path import basename
from collections import defaultdict
helpers = Helpers()
writers = YamlWriter()
def volume_create(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
if Global.trace:
Global.logger.info("Splitting volume and hostnames")
if not section_dict.get('brick_dirs'):
section_dict = get_common_brick_dirs(section_dict)
if Global.trace:
Global.logger.info("Retrieving common brick directories among hosts.")
else:
section_dict = validate_brick_dirs(section_dict, 'brick_dirs')
if Global.trace:
Global.logger.info("Error in retrieving brick directories"\
" Validating brick directories.")
section_dict['service'] = 'glusterd'
section_dict['state'] = 'started'
Global.current_hosts = helpers.unique(Global.current_hosts)
section_dict['hosts'] = Global.current_hosts
yamls = [defaults.SERVICE_MGMT, defaults.CREATEDIR_YML]
if Global.trace:
Global.logger.info("Executing yamls %s and %s."\
% (defaults.SERVICE_MGMT, defaults.CREATEDIR_YML))
ret = call_peer_probe(section_dict)
if ret:
section_dict = ret
yamls.append(defaults.PROBE_YML)
if Global.trace:
Global.logger.info("Executing %s."% defaults.PROBE_YML)
yamls.append(defaults.VOLCREATE_YML)
if Global.trace:
Global.logger.info("Executing %s."% defaults.VOLCREATE_YML)
section_dict, set_yml = volume_set(section_dict)
if set_yml:
yamls.append(set_yml)
section_dict, start_yml = volume_start(section_dict)
yamls.append(start_yml)
sdict, yml = get_smb_data(section_dict)
if Global.trace:
Global.logger.info("Checking if Samba is enabled on volume.")
if sdict:
yml = helpers.listify(yml)
section_dict = sdict
yamls.extend(yml)
if type(section_dict['transport']) is list:
section_dict['transport'] = ','.join(section_dict['transport'])
# Configure SSL on the volume if enable_ssl is set.
if section_dict['enable_ssl'].lower() == "yes":
if section_dict.has_key('ssl_clients'):
section_dict['ssl_hosts'] = list(set(helpers.listify
(section_dict['ssl_clients'])\
+ Global.hosts))
else:
section_dict['ssl_hosts'] = list(set(Global.hosts))
section_dict['ssl_allow_list'] = ','.join(section_dict['ssl_hosts'])
section_dict['ssl_base_dir'] = Global.base_dir
helpers.write_to_inventory('ssl_hosts', section_dict['ssl_hosts'])
# Enable SSL on the volume
yamls.append(defaults.ENABLE_SSL)
if Global.trace:
Global.logger.info("Executing %s."% defaults.ENABLE_SSL)
return section_dict, yamls
def get_smb_data(section_dict):
smb = section_dict.get('smb')
if smb:
if smb.lower() == 'yes':
return volume_smb_setup(section_dict)
elif smb.lower() == 'no':
return volume_smb_disable(section_dict)
return False, False
def call_peer_probe(section_dict):
global helpers
peer_action = helpers.config_section_map(
'peer', 'action', False) or 'True'
if peer_action != 'ignore':
to_be_probed = Global.current_hosts + Global.brick_hosts
to_be_probed = helpers.unique(to_be_probed)
section_dict['to_be_probed'] = to_be_probed
return section_dict
return False
def get_common_brick_dirs(section_dict):
global helpers, writers
f_brick_list, brick_name = [], []
host_files = os.listdir(Global.host_vars_dir)
for host in host_files:
filename = helpers.get_file_dir_path(Global.host_vars_dir,
host)
ret = read_brick_dir_from_file(filename)
if not ret:
continue
brick_list, brick_name = ret
check_brick_name_format(brick_name)
writers.create_yaml_dict('brick_dirs', sorted(
set(brick_name)), filename)
Global.brick_hosts.append(host)
f_brick_list.extend(brick_list)
if set(Global.current_hosts) - set(Global.brick_hosts):
ret = read_brick_dir_from_file(Global.group_file)
if ret:
brick_list, brick_name = ret
check_brick_name_format(brick_name)
f_brick_list.extend(brick_list)
section_dict['brick_dirs'] = helpers.unique(brick_name)
else:
print "\nError: 'brick_dirs' not provided for all the "\
"hosts."
helpers.cleanup_and_quit()
section_dict['mountpoints'] = helpers.unique(f_brick_list)
return section_dict
def read_brick_dir_from_file(filename):
global helpers, writers
brick_list, brick_name = [], []
if basename(filename) == 'all':
hostlist = Global.current_hosts
else:
hostlist = [basename(filename)]
if helpers.is_present_in_yaml(filename, 'mountpoints'):
brick_name = helpers.get_value_from_yaml(filename,
'mountpoints')
for each in brick_name:
brick_list.extend([host + ':' + each for host in
hostlist])
return (brick_list, brick_name)
return False
def validate_brick_dirs(section_dict, section):
global helpers, writers
brick_list, brick_name = [], []
brick_dict = {}
brick_dict = defaultdict(lambda: [], brick_dict)
brick_dirs = helpers.listify(section_dict[section])
for brick in brick_dirs:
bpat = re.match('(.*):(.*)', brick)
if not bpat:
if not Global.hosts:
print "Please provide the brick_dirs in the format " \
"<hostname>:<brick_dir name>"
helpers.cleanup_and_quit()
brick_list.extend([host + ':' + brick for host in
Global.hosts])
brick_name.append(brick)
else:
brick_list.append(brick)
brick_name.append(bpat.group(2))
brick_dict[bpat.group(1)].append(bpat.group(2))
if bpat.group(1) not in Global.brick_hosts:
Global.brick_hosts.append(bpat.group(1))
if brick_dict:
for host, bname in zip(brick_dict.keys(), brick_dict.values()):
filename = helpers.get_file_dir_path(Global.host_vars_dir, host)
helpers.touch_file(filename)
helpers.create_yaml_dict('brick_dirs', bname, filename)
check_brick_name_format(brick_name)
section_dict['brick_dirs'] = helpers.unique(brick_name)
section_dict['mountpoints'] = helpers.unique(brick_list)
return section_dict
def check_brick_name_format(brick_name):
global helpers
if False in [brick.startswith('/') for brick in
helpers.unique(brick_name)]:
msg = "values to 'brick_dirs' should be absolute"\
" path. Relative given. Exiting!"
print msg
helpers.cleanup_and_quit()
return
def volume_delete(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, defaults.VOLDEL_YML
def volume_start(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, defaults.VOLUMESTART_YML
def volume_stop(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, defaults.VOLSTOP_YML
def volume_add_brick(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
yamls = []
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
section_dict = validate_brick_dirs(section_dict, 'bricks')
ret = call_peer_probe(section_dict)
if ret:
section_dict = ret
yamls.append(defaults.PROBE_YML)
yamls.append(defaults.ADDBRICK_YML)
return section_dict, yamls
def volume_remove_brick(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
section_dict['old_bricks'] = section_dict.pop('bricks')
return section_dict, defaults.REMOVEBRK_YML
def volume_rebalance(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
return section_dict, [defaults.VOLUMESTART_YML,
defaults.REBALANCE_YML]
def volume_set(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
keys = section_dict.get('key')
values = section_dict.get('value')
if not keys or not values:
return section_dict, ''
data = []
key = helpers.listify(keys)
value = helpers.listify(values)
# If values has a string and is colon or semicolon
# separated replace with comma
for idx, item in enumerate(value):
if type(item) == str and (item.__contains__(':')
or item.__contains__(';')):
value[idx] = item.replace(';', ',').replace(':', ',')
for k,v in zip(key, value):
names = {}
names['key'] = k
names['value'] = v
data.append(names)
section_dict['set'] = data
return section_dict, defaults.VOLUMESET_YML
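# Illustrative example (keys borrowed from volume_smb_setup below): with
#     section_dict['key']   = ['server.allow-insecure', 'storage.batch-fsync-delay-usec']
#     section_dict['value'] = ['on', 0]
# volume_set() produces
#     section_dict['set'] == [
#         {'key': 'server.allow-insecure', 'value': 'on'},
#         {'key': 'storage.batch-fsync-delay-usec', 'value': 0}]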
def volume_smb_setup(section_dict):
global helpers
Global.ignore_errors = section_dict.get('ignore_volume_errors')
section_dict['volname'] = helpers.split_volume_and_hostname(
section_dict['volname'])
SMB_DEFAULTS = {
'glusterfs:logfile': '/var/log/samba/' +
section_dict['volname'] + '.log',
}
section_dict = helpers.set_default_values(section_dict, SMB_DEFAULTS)
options = ''
for key, value in SMB_DEFAULTS.iteritems():
if section_dict[key]:
options += key + ' = ' + str(section_dict[key]) + '\n'
section_dict['key'] = ['server.allow-insecure',
'storage.batch-fsync-delay-usec']
section_dict['value'] = ['on', 0]
section_dict, yml = volume_set(section_dict)
section_dict['service'] = 'glusterd'
section_dict['state'] = 'started'
return section_dict, [defaults.SERVICE_MGMT, yml, defaults.SMBREPLACE_YML,
defaults.SMBSRV_YML]
def volume_smb_disable(section_dict):
section_dict['key'] = "user.smb"
section_dict['value'] = "disable"
return volume_set(section_dict)
def volume_enable_ssl(section_dict):
"""
Enable ssl on an existing volume
"""
print "Ensure clients are unmounted before continuing. Add umount "\
"section in config."
if section_dict.has_key('ssl_clients'):
section_dict['ssl_hosts'] = list(set(section_dict['ssl_clients'] +
Global.hosts))
else:
section_dict['ssl_hosts'] = list(set(Global.hosts))
section_dict['ssl_allow_list'] = ','.join(section_dict['ssl_hosts'])
section_dict['ssl_base_dir'] = Global.base_dir
helpers.write_to_inventory('ssl_hosts', section_dict['ssl_hosts'])
# Enable SSL on the volume
return section_dict, [defaults.ENABLE_SSL]
| gpl-2.0 | -1,763,972,171,218,459,100 | 37.720635 | 81 | 0.618185 | false |
abdulhaq-e/django-rest-framework-json-api | example/tests/unit/test_renderers.py | 1 | 1308 | # from example.models import Entry, Comment
# from rest_framework_json_api import serializers, views
# from rest_framework_json_api.renderers import JSONRenderer
#
#
# # serializers
# class RelatedModelSerializer(serializers.ModelSerializer):
# class Meta:
# model = Comment
# fields = ('id',)
#
#
# class DummyTestSerializer(serializers.ModelSerializer):
# '''
# This serializer is a simple compound document serializer which includes only
# a single embedded relation
# '''
# related_models = RelatedModelSerializer(
# source='comment_set', many=True, read_only=True)
#
# class Meta:
# model = Entry
# fields = ('related_models',)
#
# class JSONAPIMeta:
# included_resources = ('related_models',)
#
#
# # views
# class DummyTestViewSet(views.ModelViewSet):
# queryset = Entry.objects.all()
# serializer_class = DummyTestSerializer
#
#
# def test_simple_reverse_relation_included_renderer():
# '''
# Test renderer when a single reverse fk relation is passed.
# '''
# serializer = DummyTestSerializer(instance=Entry())
# renderer = JSONRenderer()
# rendered = renderer.render(
# serializer.data,
# renderer_context={'view': DummyTestViewSet()})
#
# assert rendered
| bsd-2-clause | 7,519,365,847,582,278,000 | 28.066667 | 82 | 0.655963 | false |
LCBRU/reporter | reporter/uol_redcap_reports/limb/demographics_data_quality.py | 1 | 2538 | #!/usr/bin/env python3
import re
from reporter.connections import RedcapInstance
from reporter.emailing import (
RECIPIENT_LIMB_ADMIN as RECIPIENT_ADMIN,
RECIPIENT_LIMB_MANAGER as RECIPIENT_MANAGER,
)
from reporter.application_abstract_reports.redcap.data_quality import (
RedcapFieldMatchesRegularExpression,
RedcapInvalidDate,
RedcapInvalidNhsNumber,
RedcapInvalidUhlSystemNumber,
RedcapInvalidPostCode,
RedcapInvalidEmailAddress,
)
REDCAP_PROJECT_ID = 34
class LimbDemographicsRedcapStudyNumber(RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'^[A-Z]{2}\d{4}$',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidDate(RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['nhs_no'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidUhlSystemNumber(RedcapInvalidUhlSystemNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['s_no'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidPostCode(RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['postcode', 'gp_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class LimbDemographicsRedcapInvalidEmailAddress(RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.uol_lamp,
project_id=REDCAP_PROJECT_ID,
fields=['email_add'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
| mit | 9,036,729,170,423,756,000 | 30.538462 | 81 | 0.6316 | false |
kyleellefsen/Glams | Glams/glams/glamsTemplate.py | 1 | 3083 | # encoding: utf-8
def glamsTemplate(article, username=None, resources='', rightbar=''):
'''Wraps an article string in the html template'''
if username:
userLogin="""<a href="/home">""" + username +"""</a>"""
else:
userLogin="""<a href="/home/login/">Login</a>"""
webpage="""<!DOCTYPE HTML>
<html>
<head>
<title>Glams</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<link href='http://fonts.googleapis.com/css?family=Belleza' rel='stylesheet' type='text/css'>
<link rel="stylesheet" type="text/css" href="/support/css/almanacstyle.css" />
<link rel="stylesheet" type="text/css" href="/support/css/searchform.css" />
<link rel="stylesheet" type="text/css" href="/support/css/sharebubble.css" />
<script type="text/javascript" src="/support/javascript/jquery.js"></script>
<script type="text/javascript" src="/support/javascript/jquery-ui.js"></script>
<script type="text/javascript" src="/support/javascript/glamsscript.js"></script>
<script type="text/javascript" src="/support/javascript/jquery-color.js"></script>
<link rel="icon" type="image/png" href="/support/images/neurons.png">
"""+resources+"""
</head>
<body>
<div class='bubble' style='display:none;'></div>
<div id="everything">
<header>
<div id='logo'><a href='/'> <p style="color: white;font-family:'Belleza', sans-serif;">Glams Database</p> </a></div>
<!-- <form id='topsearchform' method="post" action="/search/" class="searchform">
<input class="searchfield" name="tags" type="text" value="Search..." onfocus="if (this.value == 'Search...') {this.value = '';}" onblur="if (this.value == '') {this.value = 'Search...';}" />
<input class="searchbutton" type="submit" value="Go" />
</form> -->
<div id='userLoginBox'>
<div id='userLogin'>"""+userLogin+"""</div>"""
if username:
webpage+="""
<ul id='userLoginList'>
<li><a class='button-link' href='/home/logout/'>Log out</a></li>
<li><a class='button-link' href='/home/settings/'>Account Settings</a></li>
</ul>"""
webpage+="""
</div>
</header>
<div id="content">
<article>"""
webpage+= article+"""</article>
<div id='between_article_and_aside'></div>
<aside>"""
webpage+=rightbar+"""</aside>
</div>
</div>
    <footer><a href='http://scitru.com/kyleellefsen/'>Kyle Ellefsen. © 2015-2016</a></footer>
</body>
</html>"""
return webpage
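# Usage sketch (illustrative values):
#
#     html = glamsTemplate("<p>Hello</p>", username="kyle",
#                          rightbar="<p>links</p>")
#     # -> the complete HTML page as a string, ready to return from a handler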
# yolanother/ubuntumobidev_ubiquity :: ubiquity/components/yabootinstaller.py (license: GPL-3.0)
# -*- coding: utf-8; Mode: Python; indent-tabs-mode: nil; tab-width: 4 -*-
# Copyright (C) 2006 Canonical Ltd.
# Written by Colin Watson <[email protected]>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from ubiquity.filteredcommand import FilteredCommand
class YabootInstaller(FilteredCommand):
def prepare(self):
return (['/usr/lib/ubiquity/yaboot-installer/yaboot-installer',
'/target'], ['ERROR'])
def error(self, priority, question):
self.frontend.error_dialog(self.description(question),
self.extended_description(question))
return FilteredCommand.error(self, priority, question)
| gpl-3.0 | -2,899,282,235,284,080,600 | 42.064516 | 76 | 0.716854 | false |
knarfeh/HiaBlog | app/main/signals.py | 1 | 1539 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from flask import request, current_app
from blinker import Namespace
from . import models, ext
from app.hia.config import HiaBlogSettings
hiablog_signals = Namespace()
post_visited = hiablog_signals.signal('post-visited')
post_published = hiablog_signals.signal('post-published')
search_engine_submit_urls = HiaBlogSettings['search_engine_submit_urls']
@post_visited.connect
def on_post_visited(sender, post, **extra):
tracker = models.Tracker()
tracker.post = post
proxy_list = request.headers.getlist('X-Forwarded-For')
tracker.ip = request.remote_addr if not proxy_list else proxy_list[0]
tracker.user_agent = request.headers.get('User-Agent')
tracker.save()
try:
post_statistic = models.PostStatistics.objects.get(post=post)
except models.PostStatistics.DoesNotExist:
post_statistic = models.PostStatistics()
post_statistic.post = post
from random import randint
post_statistic.verbose_count_base = randint(500, 5000)
post_statistic.save()
post_statistic.modify(inc__visit_count=1)
@post_published.connect
def on_post_published(sender, post, **extra):
post_url = request.host + post.get_absolute_url()
# print post_url
baidu_url = search_engine_submit_urls['baidu']
if baidu_url:
# print 'Ready to post to baidu'
res = ext.submit_url_to_baidu(baidu_url, post_url)
print res.status_code, res.text
else:
print 'Not ready to submit urls yet'
| gpl-2.0 | -2,330,866,395,023,611,400 | 28.596154 | 73 | 0.693957 | false |
timy/dm_spec | pkg/pymod_data.py | 1 | 11796 | import numpy as np
def readDirIndex(fileName):
import json
fileDir = open(fileName)
idxDir = json.load(fileDir)['order']
fileDir.close()
order = ['1', '3', '5']
idxStart = [0, 6, 44]
return order, idxDir, idxStart
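# Assumed JSON layout (illustrative): the file maps "order" to the direction
# index, e.g. {"order": [[1, 0, 0], [0, 1, 0], ...]}. The hard-coded labels
# '1', '3', '5' and offsets 0/6/44 mark where each order's directions start.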
def calcAmplitude(fileNameFunc, nt, nCoo, iDir):
s = np.zeros(nt)
for iCoo in range(nCoo):
data = np.loadtxt( fileNameFunc(iCoo, iDir) )
for i in range(2):
s += data[:,i]**2
return np.sqrt(s)
# Note: currently unused. Returns the index of direction l = [l1, l2, l3] in idxDir.
def indexOfDir( idxDir, l ):
# for iOrder in range(nOrder):
for index, item in enumerate(idxDir):
if item == l:
return index
def calcAmplitudeMax(fileNameFunc, nt, nDir, nCoo=3, idxStart=0):
maxAmp = np.zeros(nDir)
for iDir in range(nDir):
maxAmp[iDir] = np.amax( calcAmplitude(fileNameFunc, nt, nCoo, idxStart+iDir) )
print( "%d: %le" % (iDir, maxAmp[iDir]) )
return maxAmp
def plotAmplitudeMax(maxAmp, idxDir, fileName):
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
nDir = len(idxDir)
fig = plt.figure(figsize=(26, 10))
ax = fig.add_subplot( 1, 1, 1 )
ax.plot(maxAmp, marker='.')
ax.grid(True)
ax.xaxis.set_major_locator(MaxNLocator(nbins=nDir-1, integer=True))
ax.set_xticks(range(nDir))
ax.set_xticklabels(idxDir, rotation='90')
plt.savefig(fileName, bbox_inches='tight')
plt.close(fig)
# def var_direction():
# dir1 = [ [ 1, 0, 0], [ 0, 1, 0], [ 0, 0, 1],
# [-1, 1, 1], [ 1,-1, 1], [ 1, 1,-1],
# [-2, 1, 0], [ 1,-2, 0], [-2, 0, 1],
# [ 0,-2, 1], [ 1, 0,-2], [ 0, 1,-2] ]
# dir2 = map( lambda a1: map( lambda a2: -1 * a2, a1 ), dir1 )
# dir3 = [ [ 3, 0, 0], [ 0, 3, 0], [ 0, 0, 3],
# [ 2, 1, 0], [ 2, 0, 1], [ 1, 2, 0],
# [ 0, 2, 1], [ 1, 0, 2], [ 0, 1, 2],
# [ 1, 1, 1] ]
# dir4 = map( lambda a1: map( lambda a2: -1 * a2, a1 ), dir3)
# # flatten to obtain a whole list of all directions
# direction = [ elem for s in [dir1, dir2, dir3, dir4] for elem in s ]
# return direction
# def var_coord():
# return [ 'x', 'y', 'z' ]
# def var_re_im():
# return [ "Re", "Im" ]
# def var_key_name( i_dir, i_coo, i_re ):
# return "%s[%2d,%2d,%2d]%s"%(i_re, i_dir[0], i_dir[1], i_dir[2], i_coo)
# def construct_var_name():
# direction, coord, re_im = var_direction(), var_coord(), var_re_im()
# name = []
# for i_dir in direction:
# for i_coo in coord:
# for i_re in re_im:
# name.append( var_key_name( i_dir, i_coo, i_re ) )
# return name
# def extract_data( d ):
# name_list = construct_var_name()
# indx_list = range( 1, 1+len(name_list) )
# data_list = [ d[:, i] for i in indx_list ]
# return dict( zip( name_list, data_list ) )
# def plot_data( data, dat_idx, handle, legend, ax, lim=[],
# lbl="", factor=1 ):
# t = data[:,0]
# dat = extract_data( data )
# if lim:
# idx_range = range( *map( lambda v: (np.abs(t-v)).argmin(), lim ) )
# else:
# idx_range = range( len(t) )
# for i_dat in dat_idx:
# legend.append( "%s:%s" % (lbl, i_dat) )
# handle.append( ax.plot( t[idx_range],
# dat[i_dat][idx_range] * factor,
# marker='.')[0] )
# def plot_data_abs( data, dir_idx, handle, legend, ax, lim=[],
# lbl="", factor=1 ):
# t = data[:,0]
# dat = extract_data( data )
# if lim:
# idx_range = range( *map( lambda v: (np.abs(t-v)).argmin(), lim ) )
# else:
# idx_range = range( len(t) )
# coord, re_im = var_coord(), var_re_im()
# for i_dir in dir_idx:
# s = np.zeros( len(t) )
# for i_coo in coord:
# for i_re in re_im:
# name = var_key_name( i_dir, i_coo, i_re )
# s += np.array( dat[name] ) * np.array( dat[name] )
# s = np.array( map( np.sqrt, s ) )
# legend.append( "%s:%s" % (lbl, i_dir) )
# handle.append( ax.plot( t[idx_range],
# s[idx_range] * factor,
# marker='.')[0] )
import matplotlib.pyplot as plt
# import pymod_plot
# import sys
# def plot_data_abs_all( file_name, lbl, factor=[], lim=[] ):
# """
# Loop over all directions. Each image shows abs results for
# the same direction (results may come from different files)
# """
# data = map( np.loadtxt, file_name )
# n_file = len( file_name )
# direction = var_direction()
# for i_dir in direction:
# fig = plt.figure()
# ax = fig.add_subplot( 1, 1, 1 )
# handle, legend = [], []
# print "Processing direction %s..." % i_dir
# for i in range(n_file):
# plot_data_abs( data[i], [i_dir], handle, legend, ax, lim,
# lbl[i], factor[i] )
# fname = "fig/ppar_%2d_%2d_%2d.png" % (i_dir[0], i_dir[1], i_dir[2])
# pymod_plot.plot_1d_draw( handle, legend, ax,
# xlabel=r"Time (fs)",
# ylabel=r"Polarization (arb.)",
# filename=fname,
# show=False )
# #plt.xlim( [ t[idx_range[0]], t[idx_range[-1]] ] )1
# plt.close()
def fft_1d( t, ns, data, t0=0.0, row=True, inv=False, w_shift=[],
debug=False ):
C_cm2au, C_fs2au = 4.55941e-6, 41.3413733
coef = C_cm2au * C_fs2au
if row == True:
signal = np.zeros( (ns, len(t)), dtype=complex )
else:
signal = np.zeros( (len(t), ns), dtype=complex )
# frequencies
dt= t[1] - t[0]
idx_0 = np.abs( t - t0 ).argmin()
if inv == False: # don't know if this is correct..
f = np.fft.fftshift( np.fft.fftfreq( len(t), dt*C_fs2au ) )
else:
f = np.fft.ifftshift( np.fft.fftfreq( len(t), dt*C_fs2au ) )
w = 2.0 * np.pi * f / C_cm2au
if w_shift:
w -= w_shift
# data
for i in range(ns):
if row == True:
data_1d = data[i,:]
else:
data_1d = data[:,i]
data_1d = np.squeeze( np.asarray( data_1d ) )
# hanning window
# data_1d = data_1d * np.hanning(len(data_1d))
if w_shift:
data_1d = map( lambda it: data_1d[it] *
np.exp( 1j * coef * w_shift * t[it] ),
range( len(t) ) )
d1 = np.roll( data_1d, 0-idx_0 )
if debug == True:
plt.figure()
t1 = np.roll( t, 0-idx_0 )
plt.plot( t1, np.abs(d1), marker='.' )
if inv == False:
ft = np.fft.fftshift( np.fft.fft( d1 ) )
else:
ft = np.fft.fftshift( np.fft.ifft( d1 ) )
if row == True:
signal[i,:] = ft
else:
signal[:,i] = ft
return w, signal
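# A minimal usage sketch for fft_1d (the Gaussian test signal and time grid
# are assumptions, not data from this project):
#
#     t = np.linspace(-100.0, 900.0, 1024)              # time grid in fs
#     pol = np.asmatrix(np.exp(-(t / 50.0) ** 2) + 0j)  # shape (1, nt)
#     w, spec = fft_1d(t, 1, pol, t0=0.0, row=True)     # w in cm^-1
#
# C_fs2au and C_cm2au convert fs and cm^-1 to atomic units, so the returned
# axis w = 2*pi*f / C_cm2au is a wavenumber axis in cm^-1.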
def plot_signal_1d( t, data, func, handle, legend, ax, obj=[],
t0=0.0, lim=[], lbl="", factor=1, padding=0.0,
w_shift=[] ):
"""
    plot_signal_1d plots a single 1d signal/spectrum read from one file.
    Unless obj == 'p', the signal is FFT'd and plotted in the frequency
    domain.
    data: the data loaded from the file storing the signal
    func: function applied to the (complex) signal, e.g. np.real or np.abs
handle: handle of plots
legend: handle of legends
ax: axis of the main figure
t0: the t0 time for FFT
lim: list, defining the time range for use, e.g., [-300, 500]
lbl: the common label for plots of current data
factor: the scale of current plot
    padding [fs]: extra zero padding in time for higher FFT resolution;
                  "lim" can then go beyond the actual data after padding
|++++++++++ data format ++++++++++|
"t" has the format:
t0 t1 t2 ... tn, should be read from file "res/time.dat"
------------------------------
"data" has the format:
Re[p(t0)] Im[p(t0)]
Re[p(t1)] Im[p(t1)]
...
Re[p(tn)] Im[p(tn)]
    * The i-th row represents the signal at the i-th time point
    * Each row is combined into a single complex value
------------------------------
"""
legend.append( "%s" % lbl )
# convert data to complex variable, and Pol -> Ef
d = 1j * ( data[:,0] + 1j * data[:,1] )
dt = t[1] - t[0] # fs.
# padding should be done before invoking other operations
if padding > 0:
t_padding = np.arange( t[-1]+dt, t[-1]+padding+dt, dt )
n_padding = len(t_padding)
t = np.concatenate( [t, t_padding] )
d = np.concatenate( [ d, np.zeros( n_padding, dtype=complex ) ] )
# find index within lim
if lim:
idx_range = range( *map( lambda v: (np.abs(t-v)).argmin(), lim ) )
else:
idx_range = range( len(t) )
# truncated time
tt, dd = t[idx_range], d[idx_range]
if obj == 'p': # draw the 'polarization' to be fft transformed
handle.append( ax.plot( tt, func( dd ) * factor, marker='.' )[0] )
return
dd = np.asmatrix( dd ) # shape (n,) -> (1, n)
w, signal = fft_1d( tt, 1, dd, t0=t0, row=True, inv=False,
w_shift=w_shift )
signal = np.squeeze( signal ) # shape (1, n) -> (n,)
handle.append( ax.plot( w, func( signal ) * factor, marker='.' )[0] )
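# Example call (the file names and two-column Re/Im layout are assumptions
# following the docstring above):
#
#     fig, ax = plt.subplots()
#     handle, legend = [], []
#     t = np.loadtxt('res/time.dat')
#     dat = np.loadtxt('res/ppar.dat')       # two columns: Re, Im
#     plot_signal_1d(t, dat, np.abs, handle, legend, ax,
#                    t0=0.0, lim=[-300, 500], lbl='1st order', padding=500.0)
#     ax.legend(handle, legend)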
def plot_2D( signal, ax, extent ):
origin = 'lower'
norm = plt.cm.colors.Normalize(vmax=signal.max(), vmin=signal.min())
im = ax.imshow( signal, cmap=plt.cm.summer, norm=norm,
#aspect=1 ) #aspect='auto',
origin=origin,
extent=extent )
ax.contour( signal, 20, hold='on', colors = 'k',
origin=origin,
extent=extent )
from mpl_toolkits.axes_grid1 import make_axes_locatable
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.5)
cbar = plt.colorbar(im, cax=cax)
cbar.ax.tick_params(labelsize=16)
import sys
def plot_signal_2d( t, s, data, func, ax, obj=[],
t0=[0.0, 0.0], lim=[], lbl="",
factor=1, padding=0.0,
w_shift=[] ):
nt, ns = len(t), len(s)
print nt, ns
print len(data)
print np.shape(data)
if ( len(data) != nt*ns ) or ( np.shape(data)[1] != 2 ):
print( "data does not have proper shape!" )
sys.exit()
# t_lower, t_upper, s_lower, s_upper = t[0], t[-1], s[0], s[-1]
data = np.reshape( data[:,0] + 1j * data[:,1], (ns, nt) )
# convert Pol -> Ef
data *= +1j
if obj == 'p': # draw the 'polarization' to be fft transformed
extent = ( t[0], t[-1], s[0], s[-1])
plot_2D( func(data), ax, extent );
return
x, signal = fft_1d( t, ns, data, t0=t0[0], row=True, inv=False,
w_shift=w_shift[0] )
y, signal = fft_1d( s, nt, signal, t0=t0[1], row=False, inv=False,
w_shift=w_shift[1] )
# the correct one for (FFT, IFFT) sequence
# then no need to process the signal if axes is given by ( w_t, -w_tau )
# since the signal under IFFT is automatically generated for -w_tau
# extent = (x[0], x[-1], -y[-1], -y[0])
# the most general results
# the correct one for (FFT, FFT), with axes given by ( w_t, w_tau )
# extent = (x[0], x[-1], y[0], y[-1])
# After general (FFT, FFT), can also flip for axes ( w_t, -w_tau )
# can be directly compare with paper
signal = np.flipud( signal )
extent = ( x[0], x[-1], -y[-1], -y[0] )
plot_2D( func(signal), ax, extent )
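# Example call (shapes follow the checks above: `data` holds nt*ns rows of
# Re/Im pairs; the carrier frequency wc is an assumption):
#
#     fig, ax = plt.subplots()
#     plot_signal_2d(t, s, data, np.abs, ax,
#                    t0=[0.0, 0.0], w_shift=[wc, wc])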
| mit | 5,346,539,297,204,704,000 | 35.63354 | 86 | 0.508562 | false |
oscurart/BlenderAddons | old/oscurart_resize_resolution.py | 1 | 1753 | # Compensa el tamanio de imagen al modificar el lente de la camara.
bl_info = {
"name": "Resize Render Resolution",
"author": "Oscurart",
"version": (1, 0),
"blender": (2, 66, 0),
"location": "Search > Resize Resolution by Camera Angle",
"description": "Resize render dimension by camera angle.",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": "Render"}
import bpy
import math
def defResizeResolution(context, anguloInicio, anguloPrimero, resx, resy):
    # compute values
    anguloActual = math.degrees(anguloInicio / 2)
proportionxy = resx / resy
opuesto = resx / 2
adyacente = opuesto / math.tan(anguloInicio / 2)
newx = (adyacente * math.tan(math.radians(anguloPrimero/2))) * 2
    # set values
context.scene.render.resolution_x = newx
context.scene.render.resolution_y = newx / proportionxy
context.scene.camera.data.angle = math.radians(anguloPrimero)
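# Worked example (illustrative numbers only): with a 1920x1080 render and
# the camera angle widening from 49.1 deg to 60 deg,
#   adyacente = 960 / tan(24.55 deg) ~= 2102
#   newx      = 2 * 2102 * tan(30 deg) ~= 2427
# so resolution_x becomes ~2427 and resolution_y ~1365, keeping the 16:9
# proportion while the framing on screen stays the same.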
class ResizeResolution(bpy.types.Operator):
bl_idname = "scene.resize_resolution"
bl_label = "Resize Resolution by Camera Angle"
bl_options = {"REGISTER", "UNDO"}
anguloPrimero = bpy.props.FloatProperty(name="Field of View", default=math.degrees(.8575), min=.01 )
def execute(self, context):
anguloInicio = context.scene.camera.data.angle
resx = context.scene.render.resolution_x
resy = context.scene.render.resolution_y
print(resx)
defResizeResolution(context, anguloInicio, self.anguloPrimero, resx, resy)
return {'FINISHED'}
def register():
bpy.utils.register_class(ResizeResolution)
def unregister():
bpy.utils.unregister_class(ResizeResolution)
if __name__ == "__main__":
register()
| gpl-2.0 | 4,949,207,930,317,479,000 | 27.274194 | 104 | 0.661723 | false |
BjornFJohansson/molbio-test-generator | exam_generator.py | 1 | 3576 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
make_only_first_exam = False
encrypt_exam = True
import time
import string
import re
#import py7zlib
import subprocess
import os
import shelve
import shutil
from bio_info_questions import *
from exam_settings import *
import ParseStudentList
mecs, names = ParseStudentList.parse_student_file(student_list_file)
if not encrypt_exam:
print "No encryption!"
password_to_open_exam=""
password_to_see_correct_exam=""
else:
print u"password_to_open_exam = {}".format(password_to_open_exam)
print u"password_to_see_correct_exam = {}".format(password_to_see_correct_exam)
password_to_open_exam= u"-p{}".format(password_to_open_exam)
password_to_see_correct_exam= u"-p{}".format(password_to_see_correct_exam)
studentlist = zip(mecs,names)
if make_only_first_exam:
studentlist = studentlist[:1]
shelf = shelve.open(shelf_file)
if not os.path.isdir(exam_folder):
os.makedirs(exam_folder)
for student in studentlist:
timestamp = int(time.time())
mec, name = student
print "Start prep exam for",mec,name
q=[]
q.append( reverse_complement.question(1,50) )
q.append( change_origin.question(2) )
q.append( find_feature_rc.question(1) )
q.append( find_region_of_similarity.question(4) )
q.append( find_repeated_sequences.question(4) )
q.append( pcr_cloning.question(8) )
empty_exam = header.format(name=name,
mec=mec,
timestamp=timestamp,
question_separator=question_separator,
number_of_questions=len(q) )
correct_exam = empty_exam
for index, question in enumerate(q):
empty_exam += question_separator.format(index+1)
correct_exam += question_separator.format(index+1)
empty_exam += question.empty_question
correct_exam += question.correct_answer
shelf[question.id] = question
empty_exam += endseparator
correct_exam += endseparator
empty_exam = re.sub("\r?\n", "\r\n", empty_exam)
correct_exam = re.sub("\r?\n", "\r\n", correct_exam)
if os.path.exists(u"/tmp/exam"):
shutil.rmtree(u"/tmp/exam")
os.makedirs(u"/tmp/exam")
os.makedirs(u"/tmp/exam/files")
#os.chdir(u"/tmp/exam")
with open(u"/tmp/exam/correct_exam.txt".format(mec=mec), "w") as f:
f.write(correct_exam.encode("latin-1"))
cmd = u'7z a -tzip /tmp/exam/correct_exam_encrypted.zip /tmp/exam/correct_exam.txt {pw} '.format(pw=password_to_see_correct_exam)
slask=subprocess.call(cmd, shell=True)
os.remove(u"/tmp/exam/correct_exam.txt")
for file in os.listdir(included_files_location):
if "~" not in file and not file.startswith("."):
shutil.copy(os.path.join(included_files_location, file),u"/tmp/exam/files/"+file)
filename = u"{}_{}".format(name.replace(" ","_"),mec)
with open(u"/tmp/exam/{filename}.txt".format(filename=filename).format(mec=mec), "w") as f:
f.write( empty_exam.encode("latin-1"))
cmd = u'7za a -tzip "{exam_folder}/{filename}.zip" /tmp/exam/ {pw} '.format(pw = password_to_open_exam,
exam_folder = exam_folder,
filename = filename)
slask=subprocess.call(cmd, shell=True)
shelf.close()
print "Finished"
| bsd-2-clause | 3,588,146,955,561,264,000 | 31.509091 | 133 | 0.598993 | false |
JohnVinyard/zounds | zounds/learn/test_random_samples.py | 1 | 8672 | import unittest2
from .random_samples import \
ReservoirSampler, Reservoir, MultiplexedReservoir
from zounds.timeseries import TimeDimension, Seconds
from zounds.spectral import FrequencyDimension, FrequencyBand, LinearScale
from zounds.core import ArrayWithUnits, IdentityDimension
import numpy as np
class TestReservoir(unittest2.TestCase):
def test_nsamples_must_be_gt_zero(self):
self.assertRaises(ValueError, lambda: Reservoir(0))
def test_can_dictate_dtype(self):
r = Reservoir(100, dtype=np.float32)
r.add(np.ones(10, dtype=np.float64))
self.assertEqual(np.float32, r.get().dtype)
def test_reservoir_has_first_input_dtype_when_unspecified(self):
r = Reservoir(100)
r.add(np.ones(10, dtype=np.float64))
self.assertEqual(np.float64, r.get().dtype)
def test_raises_if_nsamples_is_not_int(self):
self.assertRaises(ValueError, lambda: Reservoir(1e2))
def test_array_has_correct_first_dimension(self):
r = Reservoir(100)
r.add(np.random.random_sample((10, 3)))
self.assertEqual(100, r.arr.shape[0])
def test_can_add_samples_larger_than_reservoir_size(self):
r = Reservoir(100)
r.add(np.random.random_sample((1000, 3)))
self.assertEqual(100, len(r.get()))
def test_array_has_correct_subsequent_dimensions(self):
r = Reservoir(100)
r.add(np.random.random_sample((10, 3, 2)))
self.assertEqual((3, 2), r.arr.shape[1:])
def test_array_with_units(self):
r = Reservoir(100)
frequency_dimension = FrequencyDimension(
LinearScale(FrequencyBand(100, 1000), 100))
samples = ArrayWithUnits(
np.ones((20, 100)),
[
TimeDimension(frequency=Seconds(1)),
frequency_dimension
])
r.add(samples)
mixed = r.get()
self.assertIsInstance(mixed, ArrayWithUnits)
self.assertEqual(100, mixed.shape[1])
self.assertIsInstance(mixed.dimensions[0], IdentityDimension)
self.assertIsInstance(mixed.dimensions[1], FrequencyDimension)
def test_reservoir_is_well_mixed(self):
r = Reservoir(100)
samples = np.arange(100)[..., None]
for i in range(0, 100, 10):
r.add(samples[i: i + 10])
mixed = r.get().squeeze()
diff = np.diff(mixed)
self.assertFalse(np.all(diff == 1))
def test_can_provide_explicit_indices_when_adding(self):
r = Reservoir(10)
samples = np.arange(10)[..., None]
r.add(samples, indices=samples.squeeze()[::-1])
mixed = r.get()
np.testing.assert_allclose(mixed.squeeze(), samples.squeeze()[::-1])
def test_raises_when_samples_and_explicit_indices_dont_match(self):
r = Reservoir(10)
samples = np.arange(10)[..., None]
self.assertRaises(
ValueError, lambda: r.add(samples, indices=samples.squeeze()[:5]))
def test_can_get_batch(self):
r = Reservoir(100)
samples = np.arange(100)[..., None]
for i in range(0, 100, 10):
r.add(samples[i: i + 10])
samples = r.get_batch(15)
self.assertEqual(15, samples.shape[0])
def test_raises_if_get_batch_is_larger_than_total_sample_size(self):
r = Reservoir(100)
samples = np.arange(100)[..., None]
for i in range(0, 100, 10):
r.add(samples[i: i + 10])
self.assertRaises(ValueError, lambda: r.get_batch(1000))
def test_raises_if_get_batch_is_larger_than_available_sample_size(self):
r = Reservoir(100)
samples = np.arange(100)[..., None]
for i in range(0, 50, 10):
r.add(samples[i: i + 10])
self.assertRaises(ValueError, lambda: r.get_batch(64))
class TestMultiplexedReservoir(unittest2.TestCase):
def test_is_consistent_across_keys(self):
r = MultiplexedReservoir(100)
samples = np.random.random_sample((10, 3))
r.add(dict(cat=samples, dog=samples))
mixed = r.get()
np.testing.assert_allclose(mixed['cat'], mixed['dog'])
def test_raises_when_wrong_set_of_keys_passed_to_add(self):
r = MultiplexedReservoir(100)
samples = np.random.random_sample((10, 3))
r.add(dict(cat=samples, dog=samples))
self.assertRaises(
ValueError, lambda: r.add(dict(rat=samples, frog=samples)))
def test_raises_when_some_keys_have_mismatched_lengths(self):
r = MultiplexedReservoir(100)
samples = np.random.random_sample((10, 3))
self.assertRaises(
ValueError, lambda: r.add(dict(cat=samples, dog=samples[:-1])))
def test_raises_when_some_keys_have_mismatched_lengths_second_add(self):
r = MultiplexedReservoir(100)
samples = np.random.random_sample((10, 3))
r.add(dict(cat=samples, dog=samples))
self.assertRaises(
ValueError, lambda: r.add(dict(cat=samples, dog=samples[:-1])))
def test_get_returns_dict_with_user_specified_keys(self):
r = MultiplexedReservoir(100)
samples = np.random.random_sample((10, 3))
d = dict(cat=samples, dog=samples)
r.add(d)
mixed = r.get()
self.assertEqual(set(d.keys()), set(mixed.keys()))
class TestReservoirSampler(unittest2.TestCase):
def test_can_sample_from_one_dimensional_feature(self):
sampler = ReservoirSampler(nsamples=10)
frequency_dimension = FrequencyDimension(
LinearScale(FrequencyBand(100, 1000), 100))
samples = ArrayWithUnits(
np.ones((20, 100)),
[
TimeDimension(frequency=Seconds(1)),
frequency_dimension
])
sampler._enqueue(samples, pusher=None)
reservoir = sampler._r
self.assertEqual((10, 100), reservoir.shape)
self.assertIsInstance(reservoir, ArrayWithUnits)
self.assertEqual(reservoir.dimensions[0], IdentityDimension())
self.assertEqual(reservoir.dimensions[1], frequency_dimension)
def test_can_wrap_samples(self):
sampler = ReservoirSampler(nsamples=10)
frequency_dimension = FrequencyDimension(
LinearScale(FrequencyBand(100, 1000), 100))
samples = ArrayWithUnits(
np.ones((2, 10, 100)),
[
TimeDimension(frequency=Seconds(10)),
TimeDimension(frequency=Seconds(1)),
frequency_dimension
])
sampler._enqueue(samples, pusher=None)
reservoir = sampler._r
self.assertEqual((10, 10, 100), reservoir.shape)
self.assertIsInstance(reservoir, ArrayWithUnits)
self.assertEqual(reservoir.dimensions[0], IdentityDimension())
self.assertEqual(reservoir.dimensions[1], samples.dimensions[1])
self.assertEqual(reservoir.dimensions[2], samples.dimensions[2])
def test_can_dequeue_when_reservoir_is_full(self):
sampler = ReservoirSampler(nsamples=10)
frequency_dimension = FrequencyDimension(
LinearScale(FrequencyBand(100, 1000), 100))
samples = ArrayWithUnits(
np.ones((10, 10, 100)),
[
TimeDimension(frequency=Seconds(10)),
TimeDimension(frequency=Seconds(1)),
frequency_dimension
])
sampler._enqueue(samples, pusher=None)
reservoir = sampler._dequeue()
self.assertEqual((10, 10, 100), reservoir.shape)
self.assertIsInstance(reservoir, ArrayWithUnits)
self.assertEqual(reservoir.dimensions[0], IdentityDimension())
self.assertEqual(reservoir.dimensions[1], samples.dimensions[1])
self.assertEqual(reservoir.dimensions[2], samples.dimensions[2])
def test_can_dequeue_when_reservoir_is_partially_full(self):
sampler = ReservoirSampler(nsamples=10)
frequency_dimension = FrequencyDimension(
LinearScale(FrequencyBand(100, 1000), 100))
samples = ArrayWithUnits(
np.ones((4, 10, 100)),
[
TimeDimension(frequency=Seconds(10)),
TimeDimension(frequency=Seconds(1)),
frequency_dimension
])
sampler._enqueue(samples, pusher=None)
reservoir = sampler._dequeue()
self.assertEqual((4, 10, 100), reservoir.shape)
self.assertIsInstance(reservoir, ArrayWithUnits)
self.assertEqual(reservoir.dimensions[0], IdentityDimension())
self.assertEqual(reservoir.dimensions[1], samples.dimensions[1])
self.assertEqual(reservoir.dimensions[2], samples.dimensions[2])
| mit | -2,302,916,460,329,528,000 | 36.541126 | 78 | 0.624654 | false |
eads/deck-blueprint | blueprint.py | 1 | 5367 | # -*- coding: utf-8 -*-
import codecs
import getpass
import json
import os
import requests
import shutil
from clint.textui import colored, puts
from cssmin import cssmin
from flask import g, Blueprint
from jinja2 import Markup
from slimit import minify
from smartypants import smartypants
from tarbell.hooks import register_hook
from tarbell.utils import ensure_directory
NAME = "deck.js slideshow"
EXCLUDES = [
'app',
'styles',
'lib',
'bower.json',
'requirements.txt',
'*.md',
]
blueprint = Blueprint('base', __name__)
class Includer(object):
"""
    Base class for Javascript and CSS pseudo-template-tags.
See `make_context` for an explanation of `asset_depth`.
"""
def __init__(self):
self.includes = []
self.tag_string = None
def push(self, path):
self.includes.append(path)
return ''
def _compress(self):
raise NotImplementedError()
def _get_path(self, path):
blueprint_root = os.path.dirname(os.path.realpath(__file__))
project_path = os.path.join(blueprint_root, '../', path)
if os.path.isfile(project_path):
return project_path
blueprint_path = os.path.join(blueprint_root, path)
if os.path.isfile(blueprint_path):
return blueprint_path
def render(self, path):
config = g.current_site.app.config
# If we're in a build context, mash everything together
if config.get('BUILD_PATH'):
fullpath = os.path.join(config.get('BUILD_PATH'), path)
ensure_directory(fullpath)
with codecs.open(fullpath, 'w', encoding='utf-8') as f:
f.write(self._compress())
response = self.tag_string.format(path)
else:
response = '\n'.join([
self.tag_string.format(src) for src in self.includes
])
markup = Markup(response)
del self.includes[:]
return markup
class JavascriptIncluder(Includer):
"""
    Pseudo-template tag that handles collecting Javascript and serving appropriate clean or compressed versions.
"""
def __init__(self, *args, **kwargs):
Includer.__init__(self, *args, **kwargs)
self.tag_string = '<script type="text/javascript" src="{0}"></script>'
def _compress(self):
output = []
for src in self.includes:
with codecs.open(self._get_path(src), encoding='utf-8') as f:
output.append(minify(f.read()))
return '\n'.join(output)
class CSSIncluder(Includer):
"""
    Pseudo-template tag that handles collecting CSS and serving appropriate clean or compressed versions.
"""
def __init__(self, *args, **kwargs):
Includer.__init__(self, *args, **kwargs)
self.tag_string = '<link rel="stylesheet" type="text/css" href="{0}" />'
def _compress(self):
output = []
for src in self.includes:
with codecs.open(self._get_path(src), encoding='utf-8') as f:
output.append(cssmin(f.read()))
return '\n'.join(output)
@blueprint.app_context_processor
def context_processor():
"""
Add helper functions to context for all projects.
"""
return {
'JS': JavascriptIncluder(),
'CSS': CSSIncluder(),
'enumerate': enumerate,
}
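# A minimal sketch of how these pseudo-tags might be used inside a project
# template (the asset paths are assumptions):
#
#     {{ JS.push('js/vendor/deck.core.js') }}
#     {{ JS.push('js/app.js') }}
#     {{ JS.render('js/app.min.js') }}
#
#     {{ CSS.push('styles/style.css') }}
#     {{ CSS.render('styles/all.min.css') }}
#
# When BUILD_PATH is set (a published build) render() writes one minified
# file and emits a single tag; otherwise it emits one tag per pushed file.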
@register_hook('newproject')
def copy_files(site, git):
"""
Copy the files
"""
puts('\nCopying files from blueprint\n')
style_dir = '{0}/styles'.format(site.path)
os.mkdir(style_dir)
style_src_path = '{0}/_blueprint/styles/style.css'.format(site.path)
shutil.copy(style_src_path, style_dir)
git.add('styles/style.css')
git.commit(m='Add style.css')
slide_dir = '{0}/_slides'.format(site.path)
os.mkdir(slide_dir)
slide_src_path = '{0}/_blueprint/_slides/example.md'.format(site.path)
shutil.copy(slide_src_path, slide_dir)
git.add('_slides/example.md')
git.commit(m='Add example slide content')
bowerrc_src_path = '{0}/_blueprint/.bowerrc'.format(site.path)
shutil.copy(bowerrc_src_path, site.path)
git.add('.bowerrc')
git.commit(m='Add Bower configuration')
@register_hook('newproject')
def create_repo(site, git):
"""
Create repo when making new project
"""
create = raw_input("Want to create a Github repo for this project [Y/n]? ")
if create and not create.lower() == "y":
return puts("Not creating Github repo...")
name = site.path.split('/')[-1]
user = raw_input("What is your Github username? ")
password = getpass.getpass("What is your Github password? ")
headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
data = {'name': name, 'has_issues': True, 'has_wiki': True}
resp = requests.post('https://api.github.com/user/repos', auth=(user, password), headers=headers, data=json.dumps(data))
puts("Created {0}".format(colored.green("https://github.com/{0}/{1}".format(user, name))))
clone_url = resp.json().get("clone_url")
puts(git.remote.add("origin", "[email protected]:{0}/{1}.git".format(user,name)))
puts(git.push("origin", "master"))
@blueprint.app_template_filter('smartypants')
def smartypants_filter(text):
"""
Smarty pants
"""
if text:
return smartypants(text)
else:
return ''
| mit | 422,399,089,869,528,400 | 27.854839 | 124 | 0.617477 | false |
inTagger/UpdateDDNS | DNSPod/DNSPodAPI.py | 1 | 3202 | __author__ = 'Alexander Logger'
from string import Template
import requests
import json
import logging
base_url = 'https://www.dnspod.com/api'
get_records_url_tpl = Template(base_url + '/records/$domain')
modify_record_url_tpl = Template(base_url + '/records/$domain/$record_id')
get_auth_token_url_tpl = Template(base_url + '/auth?email=$email&password=$password')
class DNSPodAPI():
def __init__(self, auth_token: dict=None):
self.auth_token = auth_token
self.logger = logging.getLogger("UpdateDDNS")
@staticmethod
def get_auth_token(email: str, password: str) -> dict:
url = get_auth_token_url_tpl.substitute(email=email, password=password)
auth_token = requests.get(url).json()
return auth_token
def auth(self, email: str, password: str):
self.auth_token = DNSPodAPI.get_auth_token(email, password)
def get_domain_records(self, domain: str) -> dict:
self._check_auth()
url = get_records_url_tpl.substitute(domain=domain)
records = requests.get(url, cookies=self.auth_token).json()
return records
def modify_domain_record(self, domain: str, record: dict) -> dict:
self._check_auth()
url = modify_record_url_tpl.substitute(domain=domain, record_id=record['id'])
resp = requests.put(url, data=json.dumps(record), cookies=self.auth_token).json()
return resp
def update_ips(self, domain: str, sub_domains: dict, ipv4: str, ipv6: str):
self.logger.info("Getting records for domain '%s'..." % domain)
records = self.get_domain_records(domain)
self.logger.info(' Got %d records.' % len(records))
self.logger.info("Updating records...")
records_updated = 0
for RECORD in records:
# self.logger.info(" '%s' " % RECORD['sub_domain'], end='')
sub_domain = RECORD['sub_domain']
if sub_domain not in sub_domains:
continue
if RECORD['record_type'] not in sub_domains[sub_domain]:
continue
if RECORD['area'] != 'default':
                # only handle the default area for now (assumes the API
                # returns the area as a string, not a numeric code)
continue
record_type = RECORD['record_type']
if record_type == 'A':
value = ipv4
elif record_type == 'AAAA':
value = ipv6
else:
continue
if RECORD['value'] == value: # Skip when record already contains actual value.
continue
self.logger.info(" '%s' [%s]->[%s]... " % (sub_domain, RECORD['value'], value))
record = {'id': RECORD['id'], 'area': '0', 'sub_domain': sub_domain, 'record_type': record_type,
'value': value, 'ttl': RECORD['ttl']}
resp = self.modify_domain_record(domain, record)
self.logger.info(resp['message'] + '.')
records_updated += 1
self.logger.info("%d records were updated." % records_updated)
def _check_auth(self):
if self.auth_token is None:
raise NotAuthenticated()
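# A minimal usage sketch (credentials, domain and record layout are
# assumptions):
#
#     api = DNSPodAPI()
#     api.auth('user@example.com', 'secret')
#     api.update_ips('example.com',
#                    {'home': ['A', 'AAAA'], 'www': ['A']},
#                    ipv4='203.0.113.10', ipv6='2001:db8::10')
#
# update_ips() skips records whose value already matches, so running it
# from a periodic job only touches stale records.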
class NotAuthenticated(Exception):
pass | mit | -6,272,639,740,326,752,000 | 36.682353 | 108 | 0.590881 | false |
jtucker1972/XBMC-TVTime | resources/lib/Overlay.py | 1 | 68585 | # Copyright (C) 2011 James A. Tucker
#
#
# This file is part of TV Time.
#
# TV Time is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TV Time is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TV Time. If not, see <http://www.gnu.org/licenses/>.
import xbmc, xbmcgui, xbmcaddon
import subprocess, os
import time, threading, thread
import datetime
import sys, re
import random
from operator import itemgetter
from time import localtime, strftime, strptime, mktime, sleep
from datetime import datetime, date, timedelta
from decimal import *
import Globals
from xml.dom.minidom import parse, parseString
from Playlist import Playlist
from Globals import *
from Channel import Channel
from EPGWindow import EPGWindow
from ChannelList import ChannelList
from PrestageThread import *
class MyPlayer(xbmc.Player):
def __init__(self):
xbmc.Player.__init__(self, xbmc.PLAYER_CORE_AUTO)
self.stopped = False
def log(self, msg, level = xbmc.LOGDEBUG):
log('Player: ' + msg, level)
def onPlayBackStopped(self):
if self.stopped == False:
self.log('Playback stopped')
if self.overlay.sleepTimeValue == 0:
self.overlay.sleepTimer = threading.Timer(1, self.overlay.sleepAction)
self.overlay.sleepTimeValue = 1
self.overlay.startSleepTimer()
self.stopped = True
# overlay window to catch events and change channels
class TVOverlay(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self, *args, **kwargs)
self.log('Overlay: __init__')
# initialize all variables
self.channels = []
self.Player = MyPlayer()
self.Player.overlay = self
self.inputChannel = -1
self.channelLabel = []
self.lastActionTime = 0
self.actionSemaphore = threading.BoundedSemaphore()
self.setCoordinateResolution(1)
self.timeStarted = 0
self.infoOnChange = True
self.infoOffset = 0
self.invalidatedChannelCount = 0
self.showingInfo = False
self.showChannelBug = False
random.seed()
for i in range(3):
self.channelLabel.append(xbmcgui.ControlImage(50 + (50 * i), 50, 50, 50, IMAGES_LOC + 'solid.png', colorDiffuse='0xAA00ff00'))
self.addControl(self.channelLabel[i])
self.channelLabel[i].setVisible(False)
self.doModal()
self.log('Overlay: __init__ return')
def resetChannelTimes(self):
curtime = time.time()
for i in range(self.maxChannels):
self.channels[i].setAccessTime(curtime - self.channels[i].totalTimePlayed)
def onFocus(self, controlId):
pass
    # override onInit so we can set everything up before handling input
def onInit(self):
self.log('Overlay: onInit')
migrate()
self.channelLabelTimer = threading.Timer(5.0, self.hideChannelLabel)
self.infoTimer = threading.Timer(5.0, self.hideInfo)
self.background = self.getControl(101)
self.getControl(102).setVisible(False)
self.channelList = ChannelList()
# need to reset for scheduled auto channel reset
Globals.prestageThreadExit = 0
# setup directories
self.createDirectories()
self.myEPG = EPGWindow("script.pseudotv.EPG.xml", ADDON_INFO, "default")
self.myEPG.MyOverlayWindow = self
# Don't allow any actions during initialization
self.actionSemaphore.acquire()
self.log('Overlay: Read Config')
if self.readConfig() == False:
return
# build meta files if first time loading
if (
REAL_SETTINGS.getSetting("bumpers") == "true" or
REAL_SETTINGS.getSetting("commercials") == "true" or
REAL_SETTINGS.getSetting("trailers") == "true"
):
self.buildMetaFiles()
        # read in channel playlists from the video, mixed and music folders
        channelNum = 0
        for i in range(500):
            # video, mixed and music playlists are checked in that order;
            # only the first match for a given channel number is used
            for folder in ('video', 'mixed', 'music'):
                playlist = xbmc.translatePath('special://profile/playlists/' + folder + '/') + 'Channel_' + str(i + 1) + '.xsp'
                if os.path.exists(playlist):
                    channelNum = channelNum + 1
                    ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_type", "0")
                    ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_time", "0")
                    ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_1", playlist)
                    ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_2", str(""))
                    ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_3", self.channelList.cleanString(self.channelList.getSmartPlaylistName(playlist)))
                    for field in range(4, 10):
                        ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_" + str(field), str(""))
                    ADDON_SETTINGS.setSetting("Channel_" + str(channelNum) + "_playlist", playlist)
                    #self.updateDialog(progressIndicator,"Auto Tune","Found " + str(self.channelList.getSmartPlaylistName(playlist)),"")
                    break
if (REAL_SETTINGS.getSetting("autoFindMixGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindMovieGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindNetworks") == "true" or
REAL_SETTINGS.getSetting("autoFindStudios") == "true" or
REAL_SETTINGS.getSetting("autoFindTVGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindTVShows") == "true" or
REAL_SETTINGS.getSetting("autoFindMusicGenres") == "true" or
REAL_SETTINGS.getSetting("autoFindLive") == "true"):
Globals.resetSettings2 = 1
Globals.resetPrestage = 1
self.channelList.autoTune()
# There are two types of force resets
# 1. Force All Channels Reset (Addon Setting)
# 2. Force a changed channel to reset (Channel Config Change)
forceReset = int(REAL_SETTINGS.getSetting("ForceChannelReset"))
# Loop through each channel and determine if channel setting has changed
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Channel Check")
progressIndicator = 0
self.log("setMaxChannels")
self.channelList.setMaxChannels()
maxChannels = int(REAL_SETTINGS.getSetting("maxChannels"))
for i in range(maxChannels):
progressIndicator = progressIndicator + (100/maxChannels)
self.dlg.update(progressIndicator,"Channel Check","Checking if Channel " + str(i+1) + " needs to be reset")
channelChanged = ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_changed")
if channelChanged == "true":
self.log("Channel Configuration Changed")
self.log("Resetting Channel Playlist " + str(i+1))
# rebuild playlist
self.channelList.resetPlaylist(i+1)
# force channel reset does not use pre-staged file lists
# this will only reset the channel that changed
# it will not reset channels which have not changed
# only want to force channel reset once, so if force reset
# is on then skip since we will force reset the channel later
if forceReset == 0:
self.log("Force Channel " + str(i+1) + " Reset")
# reset only the specified channel
self.forceChannelReset(i+1)
Globals.resetPrestage = 1
self.dlg.close()
# update settings2.xml file
ADDON_SETTINGS.writeSettings()
# pause while settings file is being written to
while int(Globals.savingSettings) == 1:
pass
# Check if a force reset is required for all channels
# This will force rebuilding of ALL channel file lists
if forceReset > 0:
self.log("Force All Channels Reset")
# reset all channels
self.forceChannelReset("all")
Globals.resetPrestage = 1
# check auto reset
if self.checkAutoChannelReset() == True:
self.log("Auto Reset Channels")
# auto channel reset copies over pre-staged file lists to speed up loading
self.autoChannelReset()
# update live channels
self.resetLiveChannels()
# time to load in the channels
if self.loadChannels() == False:
return
self.myEPG.channelLogos = self.channelLogos
self.maxChannels = len(self.channels)
if self.maxChannels == 0:
#self.Error('Unable to find any channels. \nPlease go to the Addon Settings to configure TV Time.')
#return
dlg = xbmcgui.Dialog()
autoTune = False
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune TV Network\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindNetworks","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune TV Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindTVGenre","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Movie Studio\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindStudios","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Movie Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindMovieGenres","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Mix Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindMixGenres","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Music Genre\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindMusicGenres","true")
autoTune = True
if dlg.yesno("No Channels Configured", "Would you like TV Time to Auto Tune Live\nchannels the next time it loads?"):
REAL_SETTINGS.setSetting("autoFindLive","true")
autoTune = True
if autoTune:
self.end()
return
del dlg
found = False
for i in range(self.maxChannels):
if self.channels[i].isValid:
self.log("Channel " + str(i) + " isValid")
found = True
break
if found == False:
self.Error('Unable to find any channels. \nPlease go to the Addon Settings to configure TV Time.')
return
if self.sleepTimeValue > 0:
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
# start thread to build prestage channel files in the background
self.prestageThread = PrestageThread()
self.prestageThread.start()
# shutdown check timer
self.shutdownTimer = threading.Timer(1, self.checkShutdownFlag)
self.shutdownTimer.start()
try:
if int(self.forceReset) == 0:
self.currentChannel = self.fixChannel(int(REAL_SETTINGS.getSetting("CurrentChannel")))
else:
self.currentChannel = self.fixChannel(1)
except:
self.currentChannel = self.fixChannel(1)
self.resetChannelTimes()
self.setChannel(self.currentChannel)
self.timeStarted = time.time()
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.log("onInit: startSleepTimer")
self.startSleepTimer()
self.log("onInit: releasing semaphore")
self.actionSemaphore.release()
self.log('Overlay: onInit return')
def checkShutdownFlag(self):
if Globals.userExit == 1:
self.log("Calling TV Time Exit")
self.shutdownTimer.cancel()
self.end()
else:
self.shutdownTimer = threading.Timer(1, self.checkShutdownFlag)
self.shutdownTimer.start()
def createDirectories(self):
self.log("createDirectories")
# setup directories
self.createDirectory(CHANNELS_LOC)
self.createDirectory(GEN_CHAN_LOC)
self.createDirectory(PRESTAGE_LOC)
self.createDirectory(TEMP_LOC)
self.createDirectory(META_LOC)
self.createDirectory(FEED_LOC)
def copySourcesXML(self):
self.log("copySourcesXML")
if not os.path.exists(os.path.join(FEED_LOC,"sources.xml")):
# copy default feeds.xml file
self.channelList.copyFiles(os.path.join(ADDON_INFO, 'resources', 'live'), LIVE_LOC)
def buildMetaFiles(self):
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Initializing")
progressIndicator = 0
if REAL_SETTINGS.getSetting("bumpers"):
if not os.path.exists(META_LOC + "bumpers.meta"):
# prompt user that we need to build this meta file
self.dlg.update(progressIndicator,"Initializing","Creating Bumper File List")
bumpersfolder = REAL_SETTINGS.getSetting("bumpersfolder")
if len(bumpersfolder) > 0:
self.buildMetaFile("bumpers",bumpersfolder)
if REAL_SETTINGS.getSetting("commercials"):
if not os.path.exists(META_LOC + "commercials.meta"):
# prompt user that we need to build this meta file
self.dlg.update(progressIndicator,"Initializing","Creating Commercial File List")
commercialsfolder = REAL_SETTINGS.getSetting("commercialsfolder")
if len(commercialsfolder) > 0:
self.buildMetaFile("commercials",commercialsfolder)
if REAL_SETTINGS.getSetting("trailers"):
if not os.path.exists(META_LOC + "trailers.meta"):
# prompt user that we need to build this meta file
self.dlg.update(progressIndicator,"Initializing","Creating Trailer File List")
trailersfolder = REAL_SETTINGS.getSetting("trailersfolder")
if len(trailersfolder) > 0:
self.buildMetaFile("trailers",trailersfolder)
self.dlg.close()
def buildMetaFile(self, type, folder):
if (Globals.prestageThreadExit == 0):
self.log("buildMetaFile")
self.videoParser = VideoParser()
flext = [".avi",".mp4",".m4v",".3gp",".3g2",".f4v",".flv",".mkv",".flv"]
metaFileList = []
if os.path.exists(folder):
# get a list of valid filenames from the folder
fnlist = []
for root, subFolders, files in os.walk(folder):
for filename in files:
if (Globals.prestageThreadExit == 0): # pseudo break point to exit thread
# get file extension
basename, extension = os.path.splitext(filename)
if extension in flext: # passed first test
if (Globals.prestageThreadExit == 0):
# get file duration
filepath = os.path.join(root, filename)
dur = self.videoParser.getVideoLength(filepath)
if (dur > 0): # passed second test
# let's parse out some file information
filename_base = []
filename_parts = []
filename_parts2 = []
filename_base = filename.split(".")
filename_parts = filename_base[0].split("_")
filename_parts2 = filename_base[0].split("-")
if len(filename_parts) > len(filename_parts2):
# use filename_parts
title = filename_parts[0]
if len(filename_parts) > 1:
showtitle = filename_parts[1]
else:
showtitle = ""
if len(filename_parts) > 2:
description = filename_parts[2]
else:
description = ""
else:
# use filename_parts2
title = filename_parts2[0]
if len(filename_parts2) > 1:
showtitle = filename_parts2[1]
else:
showtitle = ""
if len(filename_parts2) > 2:
description = filename_parts2[2]
else:
description = ""
metastr = str(filepath) + '|' + str(dur) + '|' + str(title) + '|' + str(showtitle) + '|' + str(description)
metaFileList.append(metastr)
else:
self.abort()
else:
self.abort()
self.writeMetaFile(type, metaFileList)
else:
self.abort()
def writeMetaFile(self, type, metaFileList):
if (Globals.prestageThreadExit == 0):
try:
metafile = open(META_LOC + str(type) + ".meta", "w")
except:
self.Error('Unable to open the meta file ' + META_LOC + str(type) + '.meta', xbmc.LOGERROR)
return False
for file in metaFileList:
metafile.write(file + "\n")
metafile.close()
else:
self.abort()
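    # Each line in a .meta file uses the pipe-separated layout built in
    # buildMetaFile(), e.g. (hypothetical file):
    #   /bumpers/StationID_MyShow_Short.mp4|15|StationID|MyShow|Short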
# setup all basic configuration parameters, including creating the playlists that
# will be used to actually run this thing
def readConfig(self):
self.log('readConfig')
        # Sleep setting is in 30 minute increments...so multiply by 30, and then 60 (min to sec)
self.sleepTimeValue = int(REAL_SETTINGS.getSetting('AutoOff')) * 1800
self.infoOnChange = REAL_SETTINGS.getSetting("InfoOnChange") == "true"
self.showChannelBug = REAL_SETTINGS.getSetting("ShowChannelBug") == "true"
self.forceReset = REAL_SETTINGS.getSetting('ForceChannelReset')
self.channelLogos = xbmc.translatePath(REAL_SETTINGS.getSetting('ChannelLogoFolder'))
if self.channelLogos == "":
self.channelLogos = xbmc.translatePath("special://home/addons/script.tvtime/resources/images/")
if os.path.exists(self.channelLogos) == False:
self.channelLogos = IMAGES_LOC
self.startupTime = time.time()
try:
self.lastResetTime = int(REAL_SETTINGS.getSetting("LastResetTime"))
except:
self.lastResetTime = 0
# Output all settings for debugging purposes
self.log('#####################################################################################')
self.log('General Settings:')
self.log(' Auto off is - ' + str(REAL_SETTINGS.getSetting('AutoOff')))
self.log(' Show info label on channel change is - ' + str(REAL_SETTINGS.getSetting('InfoOnChange') == "true"))
self.log(' Force Channel Reset is - ' + str(REAL_SETTINGS.getSetting('ForceChannelReset')))
self.log(' Auto Channel Reset is - ' + str(REAL_SETTINGS.getSetting('autoChannelReset') == "true"))
self.log(' Auto Channel Reset Setting is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetSetting')))
self.log(' Auto Channel Reset Interval is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetInterval')))
self.log(' Auto Channel Reset Time is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetTime')))
self.log(' Auto Channel Reset Shutdown is - ' + str(REAL_SETTINGS.getSetting('autoChannelResetShutdown') == "true"))
self.log(' Show Channel Bug is - ' + str(REAL_SETTINGS.getSetting('ShowChannelBug') == "true"))
self.log(' Channel Logo Folder is - ' + str(REAL_SETTINGS.getSetting('ChannelLogoFolder')))
self.log(' Version is - ' + str(REAL_SETTINGS.getSetting('Version')))
self.log('Channels Settings:')
self.log(' Auto Find TV Network Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindNetworks')))
self.log(' Auto Find Movie Studios Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindStudios')))
self.log(' Auto Find TV Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindTVGenres')))
self.log(' Auto Find Movie Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindMovieGenres')))
self.log(' Auto Find Mixed Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindMixGenres')))
self.log(' Auto Find Music Genres Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindMusicGenres')))
self.log(' Auto Find Live Channels is - ' + str(REAL_SETTINGS.getSetting('autoFindLive')))
self.log(' Channel Limit is - ' + str(REAL_SETTINGS.getSetting('limit')))
self.log('Off Air Settings:')
self.log(' Off Air Mode is - ' + str(REAL_SETTINGS.getSetting('offair') == "true" ))
self.log(' Off Air File is - ' + str(REAL_SETTINGS.getSetting('offairfile')))
self.log('Bumpers Settings:')
self.log(' Bumpers Mode is - ' + str(REAL_SETTINGS.getSetting('bumpers') == "true" ))
self.log(' Bumpers Folder is - ' + str(REAL_SETTINGS.getSetting('bumpersfolder')))
self.log(' Number of Bumpers is - ' + str(REAL_SETTINGS.getSetting('numbumpers')))
self.log(' Max Number of Bumpers is - ' + str(REAL_SETTINGS.getSetting('maxbumpers')))
self.log('Commercials Settings:')
self.log(' Commercials Mode is - ' + str(REAL_SETTINGS.getSetting('commercials') == "true" ))
self.log(' Commercials Folder is - ' + str(REAL_SETTINGS.getSetting('commercialsfolder')))
self.log(' Number of Commercials is - ' + str(REAL_SETTINGS.getSetting('numcommercials')))
self.log(' Max Number of Commercials is - ' + str(REAL_SETTINGS.getSetting('maxcommercials')))
self.log('Trailers Settings:')
self.log(' Trailers Mode is - ' + str(REAL_SETTINGS.getSetting('trailers') == "true" ))
self.log(' Trailers Folder is - ' + str(REAL_SETTINGS.getSetting('trailersfolder')))
self.log(' Number of Trailers is - ' + str(REAL_SETTINGS.getSetting('numtrailers')))
self.log(' Max Number of Trailers is - ' + str(REAL_SETTINGS.getSetting('maxtrailers')))
self.log('Runtime Settings:')
self.log(' Current Channel is - ' + str(REAL_SETTINGS.getSetting('CurrentChannel')))
self.log(' Last Reset Time is - ' + str(REAL_SETTINGS.getSetting('LastResetTime')))
self.log(' Next Auto Reset Date/Time is - ' + str(REAL_SETTINGS.getSetting('nextAutoResetDateTime')))
self.log(' Next Auto Reset Time Interval is - ' + str(REAL_SETTINGS.getSetting('nextAutoResetDateTimeInterval')))
self.log(' Next Auto Reset Hour is - ' + str(REAL_SETTINGS.getSetting('nextAutoResetDateTimeResetTime')))
self.log('#####################################################################################')
self.log('readConfig return')
return True
def loadChannels(self):
self.log('loadChannels')
self.background.setVisible(True)
self.channels = self.channelList.setupList()
if self.channels is None:
self.log('loadChannels: No channel list returned')
self.log("loadChannels: calling end")
self.end()
return False
self.Player.stop()
return True
def channelDown(self):
self.log('channelDown')
if self.maxChannels == 1:
return
self.background.setVisible(True)
channel = self.fixChannel(self.currentChannel - 1, False)
self.setChannel(channel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.log('channelDown return')
def channelUp(self):
self.log('channelUp')
if self.maxChannels == 1:
return
self.background.setVisible(True)
channel = self.fixChannel(self.currentChannel + 1)
self.setChannel(channel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.log('channelUp return')
# set the channel, the proper show offset, and time offset
def setChannel(self, channel):
self.log('setChannel ' + str(channel))
if channel < 1 or channel > self.maxChannels:
self.log('setChannel invalid channel ' + str(channel), xbmc.LOGERROR)
return
if self.channels[channel - 1].isValid == False:
self.log('setChannel channel not valid ' + str(channel), xbmc.LOGERROR)
return
self.lastActionTime = 0
timedif = 0
self.getControl(102).setVisible(False)
self.showingInfo = False
# first of all, save playing state, time, and playlist offset for
# the currently playing channel
if self.Player.isPlaying():
if channel != self.currentChannel:
self.channels[self.currentChannel - 1].setPaused(xbmc.getCondVisibility('Player.Paused'))
# Automatically pause in serial mode
#if self.channels[self.currentChannel - 1].mode & MODE_ALWAYSPAUSE > 0:
# self.channels[self.currentChannel - 1].setPaused(True)
self.channels[self.currentChannel - 1].setShowTime(self.Player.getTime())
self.channels[self.currentChannel - 1].setShowPosition(xbmc.PlayList(xbmc.PLAYLIST_MUSIC).getposition())
self.channels[self.currentChannel - 1].setAccessTime(time.time())
self.currentChannel = channel
# now load the proper channel playlist
xbmc.PlayList(xbmc.PLAYLIST_MUSIC).clear()
if xbmc.PlayList(xbmc.PLAYLIST_MUSIC).load(self.channels[channel - 1].fileName) == False:
self.log("Error loading playlist")
self.InvalidateChannel(channel)
return
# Disable auto playlist shuffling if it's on
if xbmc.getInfoLabel('Playlist.Random').lower() == 'random':
self.log('Random on. Disabling.')
xbmc.PlayList(xbmc.PLAYLIST_MUSIC).unshuffle()
xbmc.executebuiltin("self.PlayerControl(repeatall)")
timedif += (time.time() - self.channels[self.currentChannel - 1].lastAccessTime)
# adjust the show and time offsets to properly position inside the playlist
while self.channels[self.currentChannel - 1].showTimeOffset + timedif > self.channels[self.currentChannel - 1].getCurrentDuration():
timedif -= self.channels[self.currentChannel - 1].getCurrentDuration() - self.channels[self.currentChannel - 1].showTimeOffset
self.channels[self.currentChannel - 1].addShowPosition(1)
self.channels[self.currentChannel - 1].setShowTime(0)
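        # e.g. coming back 130s later with 100s left in the current item
        # (offset 200 of 300): the loop consumes those 100s, advances one
        # position and zeroes the offset, so the seek below lands 30s into
        # the next item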
# set the show offset
self.Player.playselected(self.channels[self.currentChannel - 1].playlistPosition)
# set the time offset
self.channels[self.currentChannel - 1].setAccessTime(time.time())
if self.channels[self.currentChannel - 1].isPaused:
self.channels[self.currentChannel - 1].setPaused(False)
try:
self.Player.seekTime(self.channels[self.currentChannel - 1].showTimeOffset)
if self.channels[self.currentChannel - 1].mode & MODE_ALWAYSPAUSE == 0:
self.Player.pause()
if self.waitForVideoPaused() == False:
return
except:
self.log('Exception during seek on paused channel', xbmc.LOGERROR)
else:
seektime = self.channels[self.currentChannel - 1].showTimeOffset + timedif
try:
self.Player.seekTime(seektime)
except:
self.log('Exception during seek', xbmc.LOGERROR)
self.showChannelLabel(self.currentChannel)
self.lastActionTime = time.time()
self.log('setChannel return')
def InvalidateChannel(self, channel):
self.log("InvalidateChannel" + str(channel))
if channel < 1 or channel > self.maxChannels:
self.log("InvalidateChannel invalid channel " + str(channel))
return
self.channels[channel - 1].isValid = False
self.invalidatedChannelCount += 1
if self.invalidatedChannelCount > 3:
self.Error("Exceeded 3 invalidated channels. Exiting.")
return
remaining = 0
for i in range(self.maxChannels):
if self.channels[i].isValid:
remaining += 1
if remaining == 0:
self.Error("No channels available. Exiting.")
return
self.setChannel(self.fixChannel(channel))
def waitForVideoPaused(self):
self.log('waitForVideoPaused')
sleeptime = 0
while sleeptime < TIMEOUT:
xbmc.sleep(100)
if self.Player.isPlaying():
if xbmc.getCondVisibility('Player.Paused'):
break
sleeptime += 100
else:
self.log('Timeout waiting for pause', xbmc.LOGERROR)
return False
self.log('waitForVideoPaused return')
return True
def setShowInfo(self):
self.log('setShowInfo')
if self.infoOffset > 0:
self.getControl(502).setLabel('COMING UP:')
elif self.infoOffset < 0:
self.getControl(502).setLabel('ALREADY SEEN:')
elif self.infoOffset == 0:
self.getControl(502).setLabel('NOW WATCHING:')
position = xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition() + self.infoOffset
self.getControl(503).setLabel(self.channels[self.currentChannel - 1].getItemTitle(position))
self.getControl(504).setLabel(self.channels[self.currentChannel - 1].getItemEpisodeTitle(position))
self.getControl(505).setLabel(self.channels[self.currentChannel - 1].getItemDescription(position))
self.getControl(506).setImage(self.channelLogos + self.channels[self.currentChannel - 1].name + '.png')
self.log('setShowInfo return')
# Display the current channel based on self.currentChannel.
# Start the timer to hide it.
def showChannelLabel(self, channel):
self.log('showChannelLabel ' + str(channel))
if self.channelLabelTimer.isAlive():
self.channelLabelTimer.cancel()
self.channelLabelTimer = threading.Timer(5.0, self.hideChannelLabel)
tmp = self.inputChannel
#self.hideChannelLabel()
self.inputChannel = tmp
curlabel = 0
if channel > 99:
self.channelLabel[curlabel].setImage(IMAGES_LOC + 'label_' + str(channel // 100) + '.png')
self.channelLabel[curlabel].setVisible(True)
curlabel += 1
if channel > 9:
self.channelLabel[curlabel].setImage(IMAGES_LOC + 'label_' + str((channel % 100) // 10) + '.png')
self.channelLabel[curlabel].setVisible(True)
curlabel += 1
self.channelLabel[curlabel].setImage(IMAGES_LOC + 'label_' + str(channel % 10) + '.png')
self.channelLabel[curlabel].setVisible(True)
##ADDED BY SRANSHAFT: USED TO SHOW NEW INFO WINDOW WHEN CHANGING CHANNELS
if self.inputChannel == -1 and self.infoOnChange == True:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.infoOffset = 0
self.showInfo(5.0)
if self.showChannelBug == True:
try:
self.getControl(103).setImage(self.channelLogos + self.channels[self.currentChannel - 1].name + '.png')
except:
pass
##
self.channelLabelTimer.start()
self.log('showChannelLabel return')
# Called from the timer to hide the channel label.
def hideChannelLabel(self):
self.log('hideChannelLabel')
self.channelLabelTimer = threading.Timer(5.0, self.hideChannelLabel)
for i in range(3):
self.channelLabel[i].setVisible(False)
self.inputChannel = -1
self.log('hideChannelLabel return')
def hideInfo(self):
self.getControl(102).setVisible(False)
self.infoOffset = 0
self.showingInfo = False
if self.infoTimer.isAlive():
self.infoTimer.cancel()
self.infoTimer = threading.Timer(5.0, self.hideInfo)
def showInfo(self, timer):
self.getControl(102).setVisible(True)
self.showingInfo = True
self.setShowInfo()
if self.infoTimer.isAlive():
self.infoTimer.cancel()
self.infoTimer = threading.Timer(timer, self.hideInfo)
self.infoTimer.start()
# return a valid channel in the proper range
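    # e.g. with maxChannels == 5, fixChannel(0) returns 5 and fixChannel(6)
    # returns 1, stepping past any channels whose isValid flag is False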
def fixChannel(self, channel, increasing = True):
while channel < 1 or channel > self.maxChannels:
if channel < 1: channel = self.maxChannels + channel
if channel > self.maxChannels: channel -= self.maxChannels
if increasing:
direction = 1
else:
direction = -1
if self.channels[channel - 1].isValid == False:
return self.fixChannel(channel + direction, increasing)
return channel
# Handle all input while videos are playing
def onAction(self, act):
action = act.getId()
self.log('onAction ' + str(action))
        # Since onAction isn't always called from the same thread (weird),
# ignore all actions if we're in the middle of processing one
self.log("acquiring semaphore")
if self.actionSemaphore.acquire(False) == False:
self.log('onAction: Unable to get semaphore')
return
else:
lastaction = time.time() - self.lastActionTime
# during certain times we just want to discard all input
if lastaction < 2:
# unless it is an exit action
if action == ACTION_STOP:
Globals.userExit = 1
self.log("Exiting because user pressed exit")
#self.end()
else:
self.log('Not allowing actions')
action = ACTION_INVALID
self.log("onAction: startSleepTimer")
self.startSleepTimer()
if action == ACTION_SELECT_ITEM:
# If we're manually typing the channel, set it now
if self.inputChannel > 0:
if self.inputChannel != self.currentChannel:
self.setChannel(self.inputChannel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.inputChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
self.inputChannel = -1
else:
# Otherwise, show the EPG
if self.sleepTimeValue > 0:
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
self.hideInfo()
self.newChannel = 0
self.myEPG.doModal()
if self.newChannel != 0:
self.background.setVisible(True)
self.setChannel(self.newChannel)
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
self.background.setVisible(False)
elif action == ACTION_MOVE_UP or action == ACTION_PAGEUP:
self.channelUp()
elif action == ACTION_MOVE_DOWN or action == ACTION_PAGEDOWN:
self.channelDown()
elif action == ACTION_MOVE_LEFT:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
if self.showingInfo:
self.infoOffset -= 1
self.showInfo(10.0)
elif action == ACTION_MOVE_RIGHT:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
if self.showingInfo:
self.infoOffset += 1
self.showInfo(10.0)
elif action == ACTION_PREVIOUS_MENU:
if self.showingInfo:
self.hideInfo()
else:
dlg = xbmcgui.Dialog()
if self.sleepTimeValue > 0:
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
if dlg.yesno("Exit?", "Are you sure you want to exit TV Time?"):
Globals.userExit = 1
self.log("Exiting because user selected yes")
#self.end()
else:
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
self.startSleepTimer()
del dlg
elif action == ACTION_SHOW_INFO:
if int(ADDON_SETTINGS.getSetting("Channel_" + str(self.currentChannel) + "_type")) == 8:
self.background.setVisible(False)
xbmc.executebuiltin("ActivateWindow(12006)")
else:
if self.showingInfo:
self.hideInfo()
else:
self.showInfo(10.0)
elif action >= ACTION_NUMBER_0 and action <= ACTION_NUMBER_9:
if self.inputChannel < 0:
self.inputChannel = action - ACTION_NUMBER_0
else:
if self.inputChannel < 100:
self.inputChannel = self.inputChannel * 10 + action - ACTION_NUMBER_0
self.showChannelLabel(self.inputChannel)
elif action == ACTION_OSD:
xbmc.executebuiltin("ActivateWindow(12901)")
elif action == ACTION_STOP:
Globals.userExit = 1
self.log("Exiting because user pressed exit")
#self.end()
self.log("onAction: releasing semaphore")
self.actionSemaphore.release()
self.log('onAction return')
# Reset the sleep timer
def startSleepTimer(self):
if self.sleepTimeValue == 0:
return
# Cancel the timer if it is still running
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
# resetting sleep time value
self.sleepTimeValue = int(REAL_SETTINGS.getSetting('AutoOff')) * 1800
self.sleepTimer = threading.Timer(self.sleepTimeValue, self.sleepAction)
self.sleepTimer.start()
# This is called when the sleep timer expires
def sleepAction(self):
self.log("sleepAction: acquiring semaphore")
# TODO: show some dialog, allow the user to cancel the sleep
# perhaps modify the sleep time based on the current show
self.log("sleepAction: calling end")
self.end()
# cleanup and end
def end(self):
self.log("end")
self.background.setVisible(True)
# add a control to block script from calling end twice
# unsure why it does sometimes
if Globals.exitingTVTime == 0:
Globals.exitingTVTime = 1
self.log('EXITING TV TIME')
# trigger prestage thread to exit
self.log("end: triggering prestage thread to exit")
Globals.prestageThreadExit = 1
# wait a few seconds to allow script to exit threads, etc.
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Exiting")
self.dlg.update(0,"Exiting TV Time","Please wait...")
time.sleep(3)
# shutdown check timer
self.shutdownTimer = threading.Timer(1, self.checkShutdownFlag)
self.shutdownTimer.start()
try:
if self.shutdownTimer.isAlive():
self.log("shutdownTimer is still alive")
self.shutdownTimer.cancel()
self.log("channelLabelTimer is cancelled")
except:
self.log("error cancelling shutdownTimer")
pass
try:
if self.channelLabelTimer.isAlive():
self.log("channelLabelTimer is still alive")
self.channelLabelTimer.cancel()
self.log("channelLabelTimer is cancelled")
except:
self.log("error cancelling channelLabelTimer")
pass
try:
if self.infoTimer.isAlive():
self.log("infoTimer is still alive")
self.infoTimer.cancel()
self.log("infoTimer is cancelled")
except:
self.log("error cancelling infoTimer")
pass
try:
if self.sleepTimeValue > 0:
if self.sleepTimer.isAlive():
self.log("sleepTimer is still alive")
self.sleepTimer.cancel()
self.log("sleepTimer is cancelled")
except:
self.log("error cancelling sleepTimer")
pass
#if self.autoResetTimer > 0:
try:
if self.autoResetTimer.isAlive():
self.log("autoResetTimer is still alive")
self.autoResetTimer.cancel()
self.log("autoResetTimer is cancelled")
except:
self.log("error cancelling autoResetTimer")
pass
if self.Player.isPlaying():
self.Player.stop()
if self.timeStarted > 0 and int(Globals.channelsReset) == 0:
# for i in range(self.maxChannels):
for i in range(int(REAL_SETTINGS.getSetting("maxChannels"))):
if self.channels[i].isValid:
if self.channels[i].mode & MODE_RESUME == 0:
ADDON_SETTINGS.setSetting('Channel_' + str(i + 1) + '_time', str(int(time.time() - self.timeStarted + self.channels[i].totalTimePlayed)))
else:
tottime = 0
for j in range(self.channels[i].playlistPosition):
tottime += self.channels[i].getItemDuration(j)
tottime += self.channels[i].showTimeOffset
if i == self.currentChannel - 1:
tottime += (time.time() - self.channels[i].lastAccessTime)
ADDON_SETTINGS.setSetting('Channel_' + str(i + 1) + '_time', str(int(tottime)))
ADDON_SETTINGS.writeSettings()
try:
self.log("saving current channel " + str(self.currentChannel))
REAL_SETTINGS.setSetting('CurrentChannel', str(self.currentChannel))
except:
self.log("unable to save current channel " + str(self.currentChannel))
pass
# wait while settings file is being written to
# settings2.xml wasn't being completely written to
# before script would end
while int(Globals.savingSettings) == 1:
self.dlg.update(25,"Exiting TV Time","Waiting on settings to be saved...")
pass
self.dlg.update(50,"Exiting TV Time","Please wait...")
time.sleep(3)
self.dlg.close()
ADDON_SETTINGS.setSetting('LastExitTime', str(int(time.time())))
self.background.setVisible(False)
# need to distinguish between user eXits and auto shutdown
if int(Globals.userExit) == 0 and REAL_SETTINGS.getSetting("autoChannelResetShutdown") == "true":
#print xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "JSONRPC.Introspect", "id": 1}')
#XBMC.Quit
self.log("Threads - " + str(threading.enumerate()))
self.log("Exiting XBMC")
json_query = '{"jsonrpc": "2.0", "method": "XBMC.Quit", "id": 1}'
xbmc.executeJSONRPC(json_query)
#self.close()
else:
self.log("Threads - " + str(threading.enumerate()))
self.close()
else:
self.log("TVTime already triggered end")
#####################################################
#####################################################
#
# Channel Reset Functions
#
#####################################################
#####################################################
# rebuild filelists
def forceChannelReset(self, channel):
self.log('forceChannelReset: Channel ' + str(channel))
self.channels = []
if channel == "all":
# reset all channels
            # we only want one reset occurring at a time so let's put a check in
if Globals.forceChannelResetActive == 0:
Globals.forceChannelResetActive = 1
REAL_SETTINGS.setSetting('LastResetTime', str( int ( time.time() ) ) )
# if force reset, delete all cache files
self.channelList.deleteFiles(CHANNELS_LOC)
# if force reset, delete all prestage files
self.channelList.deleteFiles(PRESTAGE_LOC)
# call function to rebuild all channel file lists
self.channelList.buildChannelFileList(CHANNELS_LOC, "all")
# reset finished
Globals.channelsReset = 1
Globals.forceChannelResetActive = 0
else:
pass
else:
# only reset the channel passed
if Globals.forceChannelResetActive == 0:
Globals.forceChannelResetActive = 1
filename = "Channel_" + str(channel) + ".m3u"
REAL_SETTINGS.setSetting('LastResetTime', str(int(time.time())))
# delete cache file
if os.path.exists(os.path.join(CHANNELS_LOC, filename)):
os.remove(os.path.join(CHANNELS_LOC, filename))
# delete prestage files
if os.path.exists(os.path.join(PRESTAGE_LOC, filename)):
os.remove(os.path.join(PRESTAGE_LOC, filename))
# call function to rebuild channel file lists
self.channelList.buildChannelFileList(CHANNELS_LOC, channel)
# reset finished
Globals.channelsReset = 1
Globals.forceChannelResetActive = 0
def resetLiveChannels(self):
self.dlg = xbmcgui.DialogProgress()
self.dlg.create("TV Time", "Updating Live Channels")
progressIndicator = 0
self.dlg.update(progressIndicator,"Updating Live Channels")
channel = 0
maxChannels = REAL_SETTINGS.getSetting("maxChannels")
for i in range(int(maxChannels)):
channel = channel + 1
if int(ADDON_SETTINGS.getSetting("Channel_" + str(channel) + "_type")) == 9:
chname = ADDON_SETTINGS.getSetting("Channel_" + str(channel) + "_3")
                # multiply first so Python 2 integer division doesn't truncate to 0
                progressIndicator = int(channel) * 100 / int(maxChannels)
self.dlg.update(progressIndicator,"Updating Live Channels","Updating Channel " + str(channel) + " - " + str(chname))
self.channelList.buildChannelFileList(CHANNELS_LOC, channel)
self.dlg.close()
# check if auto reset times have expired
def checkAutoChannelReset(self):
needsreset = False
"""
autoChannelResetSetting
values:
0 = automatic
1 = each day
2 = each week
3 = each month
4 = scheduled
"""
        try:
            autoChannelResetSetting = int(REAL_SETTINGS.getSetting("autoChannelResetSetting"))
        except (ValueError, TypeError):
            # an empty or unset setting would make int() raise; default to automatic
            autoChannelResetSetting = 0
self.log("autoChannelResetSetting " + str(autoChannelResetSetting))
"""
if autoChannelResetSetting is set to automatic
loop through all channels to get their totalduration and time values
if total time played for the channel is greater than total duration
watched since last auto reset, then set needsreset flag to true
"""
if autoChannelResetSetting == 0:
# need to get channel settings
self.channels = []
needsreset = False
# loop through channel settings to get
# totalTimePlayed
# totalDuration
for i in range(int(REAL_SETTINGS.getSetting("maxChannels"))):
if not ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_offair") == "1":
# need to figure out how to store
totalTimePlayed = ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_time")
if totalTimePlayed == "":
totalTimePlayed = 0
self.log("Channel_" + str(i+1) + "_time " + str(totalTimePlayed))
totalDuration = ADDON_SETTINGS.getSetting("Channel_" + str(i+1) + "_totalDuration")
if totalDuration == "":
totalDuration = 0
self.log("Channel_" + str(i+1) + "_totalDuration " + str(totalDuration))
if int(totalTimePlayed) > int(totalDuration):
needsreset = True
if needsreset:
REAL_SETTINGS.setSetting('LastResetTime', str(int(time.time())))
elif autoChannelResetSetting > 0 and autoChannelResetSetting < 4: # each day, each week, each month
try:
self.lastResetTime = int(REAL_SETTINGS.getSetting("LastResetTime"))
except:
self.lastResetTime = 0
timedif = time.time() - self.lastResetTime
if int(autoChannelResetSetting) == 1 and timedif > (60 * 60 * 24):
needsreset = True
if int(autoChannelResetSetting) == 2 and timedif > (60 * 60 * 24 * 7):
needsreset = True
if int(autoChannelResetSetting) == 3 and timedif > (60 * 60 * 24 * 30):
needsreset = True
if timedif < 0:
needsreset = True
if needsreset:
REAL_SETTINGS.setSetting('LastResetTime', str(int(time.time())))
elif autoChannelResetSetting == 4: # scheduled
"""
if autoChannelResetSetting = 4,
set next reset date/time,
set timer until next reset date/time,
start auto reset timer
"""
if REAL_SETTINGS.getSetting('nextAutoResetDateTime') == "":
self.setNextAutoResetTime()
        elif REAL_SETTINGS.getSetting('nextAutoResetDateTimeInterval') != REAL_SETTINGS.getSetting('autoChannelResetInterval'):
self.setNextAutoResetTime()
        elif REAL_SETTINGS.getSetting('nextAutoResetDateTimeResetTime') != REAL_SETTINGS.getSetting('autoChannelResetTime'):
self.setNextAutoResetTime()
# set auto reset timer
self.setAutoResetTimer()
# start auto reset timer
self.startAutoResetTimer()
return needsreset
def setNextAutoResetTime(self):
# set next auto resetChannel time
# need to get current datetime in local time
currentDateTimeTuple = localtime()
        # parse out year, month and day so we can compute resetDate
cd = datetime.datetime(*(currentDateTimeTuple[0:6]))
year = cd.strftime('%Y')
month = cd.strftime('%m')
day = cd.strftime('%d')
hour = cd.strftime('%H')
minutes = cd.strftime('%M')
seconds = cd.strftime('%S')
# convert to date object so we can add timedelta in the next step
currentDateTime = year + "-" + month + "-" + day + " " + hour + ":" + minutes + ":" + seconds
currentDateTimeTuple = strptime(currentDateTime,"%Y-%m-%d %H:%M:%S")
currentDate = date(int(year), int(month), int(day))
# need to get setting of when to auto reset
# Daily|Weekly|Monthly
# 0 = Daily
# 1 = Weekly
# 2 = Monthly
# Daily = Current Date + 1 Day
        # Weekly = Current Date + 1 Week
        # Monthly = Current Date + 1 Month
resetInterval = REAL_SETTINGS.getSetting("autoChannelResetInterval")
# Time to Reset: 12:00am, 1:00am, 2:00am, etc.
# get resetTime setting
resetTime = REAL_SETTINGS.getSetting("autoChannelResetTime")
if resetInterval == "0":
# Daily
interval = timedelta(days=1)
elif resetInterval == "1":
# Weekly
interval = timedelta(days=7)
elif resetInterval == "2":
# Monthly
interval = timedelta(days=30)
# determine resetDate based on current date and interval
        # compare numerically; comparing the raw strings would be lexicographic
        if int(resetTime) > int(hour) and resetInterval == "0":
resetDate = currentDate
else:
resetDate = currentDate + interval
# need to convert to tuple to be able to parse out components
resetDateTuple = strptime(str(resetDate), "%Y-%m-%d")
# parse out year, month, and day
rd = datetime.datetime(*(resetDateTuple[0:3]))
year = rd.strftime('%Y')
month = rd.strftime('%m')
day = rd.strftime('%d')
# set hour, minutes and seconds
hour = resetTime
minutes = 0
seconds = 0
# join components together to form reset date and time
resetDateTime = str(year) + "-" + str(month) + "-" + str(day) + " " + str(hour) + ":" + str(minutes) + ":" + str(seconds)
# save next resetDateTime to settings
REAL_SETTINGS.setSetting('nextAutoResetDateTime', str(resetDateTime))
REAL_SETTINGS.setSetting('nextAutoResetDateTimeInterval', str(resetInterval))
REAL_SETTINGS.setSetting('nextAutoResetDateTimeResetTime', str(resetTime))
def setAutoResetTimer(self):
# set next auto resetChannel time
# need to get current datetime in local time
currentDateTimeTuple = localtime()
nextAutoResetDateTime = REAL_SETTINGS.getSetting('nextAutoResetDateTime')
nextAutoResetDateTimeTuple = strptime(nextAutoResetDateTime,"%Y-%m-%d %H:%M:%S")
# need to get difference between the two
self.autoResetTimeValue = mktime(nextAutoResetDateTimeTuple) - mktime(currentDateTimeTuple)
self.log("Next auto reset will occur in " + str(self.autoResetTimeValue) + " seconds")
# set timer
self.autoResetTimer = threading.Timer(self.autoResetTimeValue, self.autoChannelReset)
# Reset the sleep timer
def startAutoResetTimer(self):
if self.autoResetTimeValue == 0:
return
# Cancel the auto reset timer if it is still running
if self.autoResetTimer.isAlive():
self.autoResetTimer.cancel()
        self.autoResetTimer = threading.Timer(self.autoResetTimeValue, self.autoChannelReset)
self.autoResetTimer.start()
def autoChannelReset(self):
self.log("autoChannelReset")
# need to allow user to abort the channel reset
self.resetDialog = xbmcgui.DialogProgress()
self.resetDialog.create("TV Time", "Preparing for Auto Channel Reset")
self.resetDialog.update(0, "Preparing for Auto Channel Reset")
if self.resetDialog.iscanceled():
self.log("autoResetChannels: auto channel reset Cancelled")
self.resetDialog.close()
return False
progressPercentage = 0
for count in self.countdown(10):
progressPercentage = progressPercentage + 10
self.resetDialog.update(progressPercentage, "Preparing for Auto Channel Reset")
self.resetDialog.close()
if not self.resetDialog.iscanceled():
if Globals.autoResetChannelActive == 0:
# trigger prestage thread to exit
Globals.prestageThreadExit = 1
# block any attempt to run concurrent auto channel resets
Globals.autoResetChannelActive = 1
self.log("autoChannelReset: reset started")
# reset started
REAL_SETTINGS.setSetting('LastResetTime', str( int ( time.time() ) ) )
# delete previous files in the cache
self.log("autoChannelReset: delete previous files in the cache")
self.channelList.deleteFiles(CHANNELS_LOC)
# copy pre-staged channel file lists to cache
self.log("autoChannelReset: copying prestaged files to the cache")
self.channelList.copyFiles(PRESTAGE_LOC, CHANNELS_LOC)
# reset next auto reset time
self.setNextAutoResetTime()
try:
if self.channelLabelTimer.isAlive():
self.channelLabelTimer.cancel()
if self.infoTimer.isAlive():
self.infoTimer.cancel()
if self.sleepTimer.isAlive():
self.sleepTimer.cancel()
if self.autoResetTimer.isAlive():
self.autoResetTimer.cancel()
except:
pass
if xbmc.Player().isPlaying():
xbmc.Player().stop()
# reset channel times
if self.timeStarted > 0:
for i in range(int(REAL_SETTINGS.getSetting("maxChannels"))):
if self.channels[i].isValid:
#ADDON_SETTINGS.setSetting('Channel_' + str(i + 1) + '_time', str(int(time() - self.timeStarted + self.channels[i].totalTimePlayed)))
channel = i + 1
ADDON_SETTINGS.setSetting("Channel_" + str(channel) + "_time","0")
totalDuration = self.channelList.getTotalDuration(channel,CHANNELS_LOC)
ADDON_SETTINGS.setSetting("Channel_" + str(channel) + "_totalDuration",str(totalDuration))
try:
ADDON_SETTINGS.setSetting('CurrentChannel', str(self.currentChannel))
except:
pass
ADDON_SETTINGS.writeSettings()
Globals.channelsReset = 1
Globals.autoResetChannelActive = 0
            # need to find the right way to reinitialize the script
            # reload channels
            # update EPG and restart
autoChannelResetSetting = int(REAL_SETTINGS.getSetting("autoChannelResetSetting"))
if autoChannelResetSetting > 0 and autoChannelResetSetting < 5:
if REAL_SETTINGS.getSetting("autoChannelResetShutdown") == "false":
self.log("Restarting TV Time")
self.__init__()
else:
self.log("Exiting because auto channel reset shutdown")
self.end()
#####################################################
#####################################################
#
# Utility Functions
#
#####################################################
#####################################################
def log(self, msg, level = xbmc.LOGDEBUG):
log('TVOverlay: ' + msg, level)
def createDirectory(self, directory):
if not os.path.exists(directory):
try:
os.makedirs(directory)
except:
self.Error('Unable to create the directory - ' + str(directory))
return
# handle fatal errors: log it, show the dialog, and exit
def Error(self, message):
self.log('FATAL ERROR: ' + message, xbmc.LOGFATAL)
dlg = xbmcgui.Dialog()
dlg.ok('Error', message)
del dlg
self.log("Error: calling end")
self.end()
def message(self, data):
log('Dialog message: ' + data)
dlg = xbmcgui.Dialog()
dlg.ok('Info', data)
del dlg
def countdown(self, secs, interval=1):
while secs > 0:
yield secs
secs = secs - 1
sleep(interval)
| gpl-3.0 | 426,476,372,236,932,100 | 43.739074 | 240 | 0.563534 | false |
maxpinto/Ptz | bootcamp/auth/forms.py | 1 | 3750 | from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from bootcamp.settings import ALLOWED_SIGNUP_DOMAINS
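# ALLOWED_SIGNUP_DOMAINS is assumed to be a list such as ['*'] (allow any
# domain) or ['@example.com', '@example.org']; entries include the leading '@'
# because Validardominio compares the value[value.index("@"):] slice against it.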
def Validardominio(value):
if '*' not in ALLOWED_SIGNUP_DOMAINS:
try:
dominio = value[value.index("@"):]
if dominio not in ALLOWED_SIGNUP_DOMAINS:
raise ValidationError(u'Dominio no valido, esta aplicacion actualmente es solo para direcciones de correo especificas {0}'.format(','.join(ALLOWED_SIGNUP_DOMAINS)))
except Exception, e:
raise ValidationError(u'Dominio no valido {0}'.format(','.join(ALLOWED_SIGNUP_DOMAINS)))
def Palabras_reservadas(value):
listado = ['admin', 'settings', 'news', 'about', 'help', 'signin', 'signup',
'signout', 'terms', 'privacy', 'cookie', 'new', 'login', 'logout', 'administrator',
'join', 'account', 'username', 'root', 'blog', 'user', 'users', 'billing', 'subscribe',
'reviews', 'review', 'blog', 'blogs', 'edit', 'mail', 'email', 'home', 'job', 'jobs',
'contribute', 'newsletter', 'shop', 'profile', 'register', 'auth', 'authentication',
'campaign', 'config', 'delete', 'remove', 'forum', 'forums', 'download', 'downloads',
'contact', 'blogs', 'feed', 'feeds', 'faq', 'intranet', 'log', 'registration', 'search',
'explore', 'rss', 'support', 'status', 'static', 'media', 'setting', 'css', 'js',
'follow', 'activity', 'questions', 'articles', 'network',]
if value.lower() in listado:
raise ValidationError('Esta es una palabra Reservada')
def Usuario_no_valido(value):
if '@' in value or '+' in value or '-' in value:
raise ValidationError('Introduce un nombre de usuario valido')
def Registro_unico_correo(value):
if User.objects.filter(email__iexact=value).exists():
raise ValidationError('Esta direccion ya se encuentra registrada')
def Registro_unico_usuario(value):
if User.objects.filter(username__iexact=value).exists():
raise ValidationError('Ya existe un usuario con este nombre')
class SignUpForm(forms.ModelForm):
username = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control'}),
max_length=30,
required=True,
help_text='El nombre de usuario puede contener <strong>Alfanumericos</strong>, <strong>_</strong> y <strong>.</strong> caracteres')
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}))
confirm_password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control'}),
label="Confirm your password",
required=True)
email = forms.CharField(widget=forms.EmailInput(attrs={'class':'form-control'}),
required=True,
max_length=75)
class Meta:
model = User
exclude = ['last_login', 'date_joined']
fields = ['username', 'email', 'password', 'confirm_password',]
def __init__(self, *args, **kwargs):
super(SignUpForm, self).__init__(*args, **kwargs)
self.fields['username'].validators.append(Palabras_reservadas)
self.fields['username'].validators.append(Usuario_no_valido)
self.fields['username'].validators.append(Registro_unico_usuario)
self.fields['email'].validators.append(Registro_unico_correo)
self.fields['email'].validators.append(Validardominio)
def clean(self):
super(SignUpForm, self).clean()
password = self.cleaned_data.get('password')
confirm_password = self.cleaned_data.get('confirm_password')
if password and password != confirm_password:
self._errors['password'] = self.error_class(['Passwords no coinciden'])
return self.cleaned_data | mit | 6,717,397,107,873,970,000 | 51.097222 | 180 | 0.652267 | false |
tensor-tang/Paddle | python/paddle/fluid/contrib/slim/tests/test_quantization_mkldnn_pass.py | 1 | 7637 | # copyright (c) 2019 paddlepaddle authors. all rights reserved.
#
# licensed under the apache license, version 2.0 (the "license");
# you may not use this file except in compliance with the license.
# you may obtain a copy of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis,
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import os
import unittest
import random
import numpy as np
import paddle.fluid as fluid
import six
import paddle
from paddle.fluid.framework import IrGraph
from paddle.fluid.contrib.slim.quantization import QuantizationFreezePass
from paddle.fluid.contrib.slim.quantization import QuantizationTransformPass
from paddle.fluid.contrib.slim.quantization import TransformForMkldnnPass
from paddle.fluid import core
os.environ["CPU_NUM"] = "1"
def conv_net(img, label):
conv_pool_1 = fluid.nets.simple_img_conv_pool(
input=img,
filter_size=5,
num_filters=20,
pool_size=2,
pool_stride=2,
act="relu")
conv_pool_1 = fluid.layers.batch_norm(conv_pool_1)
conv_pool_2 = fluid.nets.simple_img_conv_pool(
input=conv_pool_1,
filter_size=5,
num_filters=50,
pool_size=2,
pool_stride=2,
act="relu")
prediction = fluid.layers.fc(input=conv_pool_2, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
avg_loss = fluid.layers.mean(loss)
return avg_loss
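# The test below exercises the full QAT-to-MKL-DNN flow: a
# QuantizationTransformPass inserts fake quantize/dequantize ops, the network
# is trained briefly, QuantizationFreezePass freezes the trained graph, and
# TransformForMkldnnPass rewrites it for MKL-DNN INT8 inference.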
class TestMKLDNNTransformBasedFreezePass(unittest.TestCase):
def setUp(self):
self.quantizable_op_and_inputs = {
'conv2d': ['Input', 'Filter'],
'depthwise_conv2d': ['Input', 'Filter'],
'mul': ['X', 'Y']
}
def check_program(self, program):
for block in program.blocks:
for op in block.ops:
if op.type in self.quantizable_op_and_inputs:
for arg_name in op.output_arg_names:
# Check quantizable op's output is linked to
# fake_dequantize's output
self.assertTrue(arg_name.endswith('.dequantized'))
def isinteger(self, x):
return np.equal(np.mod(x, 1), 0)
def build_program(self, main, startup, is_test, seed):
main.random_seed = seed
startup.random_seed = seed
with fluid.unique_name.guard():
with fluid.program_guard(main, startup):
img = fluid.layers.data(
name='image', shape=[1, 28, 28], dtype='float32')
label = fluid.layers.data(
name='label', shape=[1], dtype='int64')
loss = conv_net(img, label)
if not is_test:
opt = fluid.optimizer.Adam(learning_rate=0.001)
opt.minimize(loss)
return [img, label], loss
def mkldnn_based_freeze_graph(self,
use_cuda,
seed,
activation_quant_type,
weight_quant_type='abs_max',
for_ci=False):
random.seed(0)
np.random.seed(0)
main = fluid.Program()
startup = fluid.Program()
test_program = fluid.Program()
feeds, loss = self.build_program(main, startup, False, seed)
self.build_program(test_program, startup, True, seed)
test_program = test_program.clone(for_test=True)
main_graph = IrGraph(core.Graph(main.desc), for_test=False)
test_graph = IrGraph(core.Graph(test_program.desc), for_test=True)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
scope = fluid.Scope()
with fluid.scope_guard(scope):
exe.run(startup)
# Apply the QAT QuantizationTransformPass
transform_pass = QuantizationTransformPass(
scope=scope,
place=place,
activation_quantize_type=activation_quant_type,
weight_quantize_type=weight_quant_type)
transform_pass.apply(main_graph)
transform_pass.apply(test_graph)
build_strategy = fluid.BuildStrategy()
build_strategy.memory_optimize = False
build_strategy.enable_inplace = False
binary = fluid.CompiledProgram(main_graph.graph).with_data_parallel(
loss_name=loss.name, build_strategy=build_strategy)
quantized_test_program = test_graph.to_program()
iters = 5
batch_size = 8
train_reader = paddle.batch(
paddle.reader.shuffle(
paddle.dataset.mnist.train(), buf_size=500),
batch_size=batch_size)
test_reader = paddle.batch(
paddle.dataset.mnist.test(), batch_size=batch_size)
feeder = fluid.DataFeeder(feed_list=feeds, place=place)
# Training the model to get the weights value
with fluid.scope_guard(scope):
for _ in range(iters):
data = next(train_reader())
loss_v = exe.run(binary,
feed=feeder.feed(data),
fetch_list=[loss])
# Freeze graph for inference, but the weight of fc/conv is still float type.
freeze_pass = QuantizationFreezePass(
scope=scope, place=place, weight_quantize_type=weight_quant_type)
freeze_pass.apply(test_graph)
# Transform quantized graph for MKL-DNN INT8 inference
mkldnn_int8_pass = TransformForMkldnnPass(scope=scope, place=place)
mkldnn_int8_pass.apply(test_graph)
dev_name = '_cpu_'
if not for_ci:
marked_nodes = set()
for op in test_graph.all_op_nodes():
if op.name().find('quantize') > -1:
marked_nodes.add(op)
test_graph.draw('.', 'test_mkldnn' + dev_name +
activation_quant_type + '_' + weight_quant_type,
marked_nodes)
mkldnn_program = test_graph.to_program()
# Check the transformation weights of conv2d and mul
conv_w_mkldnn = np.array(scope.find_var('conv2d_1.w_0').get_tensor())
mul_w_mkldnn = np.array(scope.find_var('fc_0.w_0').get_tensor())
# Check if weights are still integer
self.assertFalse(self.isinteger(np.sum(conv_w_mkldnn)))
self.assertFalse(self.isinteger(np.sum(mul_w_mkldnn)))
# Check if the conv2d output and mul output are correctly linked to fake_dequantize's
# output
self.check_program(mkldnn_program)
if not for_ci:
            print('{}: {}'.format('conv_w_mkldnn' + dev_name +
                                  activation_quant_type + '_' + weight_quant_type,
                                  np.sum(conv_w_mkldnn)))
            print('{}: {}'.format('mul_w_mkldnn' + dev_name +
                                  activation_quant_type + '_' + weight_quant_type,
                                  np.sum(mul_w_mkldnn)))
def test_mkldnn_graph_cpu_static(self):
with fluid.unique_name.guard():
self.mkldnn_based_freeze_graph(
False,
seed=2,
activation_quant_type='range_abs_max',
weight_quant_type='abs_max',
for_ci=True)
self.mkldnn_based_freeze_graph(
False,
seed=2,
activation_quant_type='moving_average_abs_max',
weight_quant_type='abs_max',
for_ci=True)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -3,937,644,403,507,950,000 | 38.164103 | 94 | 0.584785 | false |
hiromu2000/zaim | tests/test_api.py | 1 | 2458 | # coding: utf-8
import os
import unittest
import zaim
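# These tests talk to the live Zaim API and expect OAuth credentials to be
# present in the environment, e.g. (hypothetical values):
#   export ZAIM_CONSUMER_KEY=xxxx
#   export ZAIM_CONSUMER_SECRET=xxxx
#   export ZAIM_ACCESS_TOKEN=xxxx
#   export ZAIM_ACCESS_TOKEN_SECRET=xxxx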
class TestApi(unittest.TestCase):
def setUp(self):
consumer_key = os.environ.get("ZAIM_CONSUMER_KEY", "")
consumer_secret = os.environ.get("ZAIM_CONSUMER_SECRET", "")
access_token = os.environ.get("ZAIM_ACCESS_TOKEN", "")
access_token_secret = os.environ.get("ZAIM_ACCESS_TOKEN_SECRET", "")
assert consumer_key, 'Please set "ZAIM_CONSUMER_KEY".'
assert consumer_secret, 'Please set "ZAIM_CONSUMER_SECRET".'
assert access_token, 'Please set "ZAIM_ACCESS_TOKEN".'
assert access_token_secret, 'Please set "ZAIM_ACCESS_TOKEN_SECRET".'
self.api = zaim.Api(consumer_key, consumer_secret, access_token, access_token_secret)
def test_verify(self):
self.assertIn('me', self.api.verify().keys())
def test_money(self):
response = self.api.money()
self.assertIn('money', response.keys())
def test_category(self):
response = self.api.category()
self.assertIn('categories', response.keys())
def test_genre(self):
response = self.api.genre()
self.assertIn('genres', response.keys())
def test_account(self):
self.assertIn('accounts', self.api.account().keys())
def __payment(self):
response = self.api.payment(
category_id='101',
genre_id='10101',
amount=1,
date='2020-04-01',
comment='comment',
name='name',
place='place',
from_account_id=0)
return response
def test_payment(self):
self.__payment()
response = self.api.money(
mapping=1,
category_id='101',
genre_id='10101',
mode='payment',
start_date='2020-04-01',
end_date='2020-04-01')
self.assertTrue(len(response['money']) > 0)
for tran in response['money']:
self.api.delete('payment', tran['id'])
def test_update(self):
response = self.__payment()
response = self.api.update('payment', response['money']['id'],
amount=1,
date='2020-04-01',
place='updated place',
name='updated name',
comment='updated comment')
self.assertIn('money', response.keys())
self.api.delete('payment', response['money']['id'])
if __name__ == '__main__':
unittest.main()
| mit | 5,947,318,330,705,038,000 | 32.216216 | 93 | 0.566721 | false |
ryankynor/Hello-friend | hellofriend.py | 1 | 1070 | """
hellofriend.py
Author: Ryan Kynor
Credit:
http://stackoverflow.com/questions/19664840/typeerror-cant-convert-float-object-to-str-implicitly
Milo
Assignment:
Write and submit an interactive Python program that asks for the user's name and age,
then prints how much older Python is than the user (based on a simple comparison of
birth year). Python's first public release occurred in 1991. Something like this:
Please tell me your name: Guido
Please tell me your age: 16
Hello, Guido. Python is 8 years older than you are!
Note that the text: "Guido" and "16" are entered by the user running the program.
The final line ("Hello...") is generated dynamically when you run the program, based
on the name and age that the user enters.
"""
name = input("Please tell me your name: ")
age = input("Please tell me your age: ")
python_age = 24  # Python's first public release was in 1991; 24 assumes the year 2015
y = python_age - int(age)
print("Hello, {0}. Python is {1} years older than you are!".format(name, y))
#s1 = "You are {0} years old."
#s2 = "pythin is {y} years older than you."
#print(int) s1.format((age))
#print(s2.format(age + 5)) | mit | -7,764,921,810,251,109,000 | 33.548387 | 97 | 0.729907 | false |
satoken/centroid-rna-package | python/test.py | 1 | 1094 | #!/usr/bin/env python
import CentroidFold
cf = CentroidFold.CentroidFold(CentroidFold.CentroidFold.CONTRAFOLD)
cf.calculate_posterior("GGGCCCAUAGCUCAGUGGUAGAGUGCCUCCUUUGCAAGGAGGAUGCCCUGGGUUCGAAUCCCAGUGGGUCCA")
ea,s=cf.decode_structure(4)
print s,ea
aln = [
"-----GCUA-AUAUCGCUGUGGAAACACCUGGAACCAUCCCGAACCCAGC-AGUUAAGCACAGUGGAGCUAAAU--GUA--G--G-UAGUAAUACUG----AG-AAUA",
"UCCGGUGACUUUACGCGUGAGGAAACACUCGUUCCCAUUCCGAACACGAC-AGUUAAGCUCCCG-CGGCCGAUGA--UAGUGCC--CA-CCA----GCGUGAA-AGUA"
]
cf.calculate_posterior(aln)
ea,s=cf.decode_structure(4)
print s,ea
cf = CentroidFold.CentroidFold(CentroidFold.CentroidFold.BOLTZMANN_ALIPFFOLD)
cf.calculate_posterior("GGGCCCAUAGCUCAGUGGUAGAGUGCCUCCUUUGCAAGGAGGAUGCCCUGGGUUCGAAUCCCAGUGGGUCCA")
ea,s=cf.decode_structure(4)
print s,ea
aln = [
"-----GCUA-AUAUCGCUGUGGAAACACCUGGAACCAUCCCGAACCCAGC-AGUUAAGCACAGUGGAGCUAAAU--GUA--G--G-UAGUAAUACUG----AG-AAUA",
"UCCGGUGACUUUACGCGUGAGGAAACACUCGUUCCCAUUCCGAACACGAC-AGUUAAGCUCCCG-CGGCCGAUGA--UAGUGCC--CA-CCA----GCGUGAA-AGUA"
]
cf.calculate_posterior(aln)
ea,s=cf.decode_structure(4)
print s,ea
| gpl-2.0 | -5,224,095,943,890,627,000 | 36.724138 | 115 | 0.80713 | false |
tompecina/legal | legal/hjp/urls.py | 1 | 1385 | # -*- coding: utf-8 -*-
#
# hjp/urls.py
#
# Copyright (C) 2011-19 Tomáš Pecina <[email protected]>
#
# This file is part of legal.pecina.cz, a web-based toolbox for lawyers.
#
# This application is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.conf.urls import url
from legal.common.views import genrender
from legal.hjp.views import mainpage, transform, transdel
urlpatterns = [
url(r'^$', mainpage, name='mainpage'),
url(r'^transform/(\d+)/$', transform, name='transform'),
url(r'^transform/$', transform, name='transform'),
url(r'^transdel/(\d+)/$', transdel, name='transdel'),
url(r'^transdeleted/$',
genrender,
kwargs={
'template': 'hjp_transdeleted.xhtml',
'page_title': 'Smazání transakce'},
name='transdeleted'),
]
| gpl-3.0 | -6,588,313,614,671,599,000 | 33.525 | 72 | 0.692976 | false |
dsalazarr/pfc_ii | pfc/pfc/applications/models.py | 1 | 2347 | from __future__ import unicode_literals
from django.db import models
from django.conf import settings
from oauth2_provider.models import Application as ApplicationModel, AccessToken as AccessTokenModel
from pfc.users.models import Company, User
class ApplicationConfig(models.Model):
id = models.AutoField(primary_key=True)
application = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL)
key = models.CharField(max_length=255, null=False)
value = models.CharField(max_length=255, null=False)
class Meta:
unique_together = ('application', 'key')
class License(models.Model):
LICENSE_TYPES = (
('DAY', 'DAY'),
('MONTH', 'MONTH'),
('YEAR', 'YEAR'),
)
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=256)
type = models.CharField(max_length=15, choices=LICENSE_TYPES)
max_users = models.IntegerField("Maximum number of users")
duration_days = models.IntegerField("Duration days of the license")
def __str__(self):
return self.name
class CompanyApplicationLicense(models.Model):
company = models.ForeignKey(Company, related_name='licenses')
license = models.ForeignKey(License)
application = models.ForeignKey(settings.OAUTH2_PROVIDER_APPLICATION_MODEL)
active = models.BooleanField(default=True)
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True)
def __str__(self):
return "%s %s" % (self.application, self.license)
class UserApplicationLicense(models.Model):
user = models.ForeignKey(User, related_name='licenses')
company_license = models.ForeignKey(CompanyApplicationLicense)
class Meta:
unique_together = (
('user', 'company_license')
)
class Permission(models.Model):
application = models.ForeignKey(ApplicationModel)
id = models.AutoField(primary_key=True)
codename = models.CharField(max_length=50)
name = models.CharField(max_length=256)
class Meta:
unique_together = (
('application', 'codename')
)
def __str__(self):
return "{} | {}".format(self.application.name, self.name)
class Application(ApplicationModel):
class Meta:
proxy = True
class AccessToken(AccessTokenModel):
class Meta:
proxy = True
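# A minimal usage sketch (hypothetical values), e.g. from a Django shell:
#
#   license = License.objects.create(name="Basic", type="YEAR",
#                                    max_users=10, duration_days=365)
#   CompanyApplicationLicense.objects.create(
#       company=company, license=license, application=app,
#       start_date=timezone.now())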
| gpl-3.0 | -8,329,098,107,580,959,000 | 27.621951 | 99 | 0.683852 | false |
larlequin/CleanMyBib | CleanMyBib/CleanMyBib_Qt.py | 1 | 12400 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys, os
import platform
import re
import csv
from PyQt4 import QtGui
from PyQt4 import QtCore
from CleanMyBib import CleanFileBib
from pybtex.database.input import bibtex
__version__ = "4.0.0"
# Allow to read the picture files in pyinstaller
datadir = ".img"
if not hasattr(sys, "frozen"): # not packed
datadir = os.path.join(os.path.dirname(__file__), datadir)
elif "_MEIPASS2" in os.environ: # one-file temp's directory
datadir = os.path.join(os.environ["_MEIPASS2"], datadir)
else: # one-dir
datadir = os.path.join(os.path.dirname(sys.argv[0]), datadir)
# --------------------------------------------------------------
# GRAPHICAL INTERFACE FOR CLEAN MY BIB
# --------------------------------------------------------------
class MainWindow(QtGui.QMainWindow):
def __init__(self):
""" Define the main widgets and options
"""
super(MainWindow, self).__init__()
# Default fields to keep in the cleaned file
self.chx = ['abstract','author','year','title','booktitle', 'journal',\
'pages', 'volume', 'editor','publisher','address']
# Create the main frame to handle the widgets
self.mainWidget=QtGui.QWidget(self)
self.setCentralWidget(self.mainWidget)
self.grid = QtGui.QGridLayout(self.mainWidget) # Define a grid
self.setLayout(self.grid)
# Create a status bar
self.status = self.statusBar()
self.status.showMessage("Ready", 5000)
# Call the menu, options and status bar
self.menu()
self.style_block()
self.bibFile()
self.statusBib()
# Define the main window size and name
self.setWindowTitle('Clean My Bib')
self.show()
def menu(self):
""" Define the action in the Menu and Toolbar
"""
# Options
options = QtGui.QAction(QtGui.QIcon('opt.jpeg'), 'Options', self)
options.setShortcut('Ctrl+O')
options.setStatusTip('Change the fields to ignore')
options.triggered.connect(self.Opts)
# Exit
exitAction = QtGui.QAction(QtGui.QIcon('exit2.jpeg'), 'Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(self.close)
# About
aboutAction = QtGui.QAction(QtGui.QIcon('about.jpeg'), 'About', self)
aboutAction.setStatusTip('About Clean My Bib')
aboutAction.triggered.connect(self.about)
# Fill the Menu
menubar = self.menuBar()
mainMenu = menubar.addMenu('&Menu')
mainMenu.addAction(options)
mainMenu.addAction(aboutAction)
mainMenu.addAction(exitAction)
def style_block(self):
""" Define the section of the GUI dedicated to the style format
User can choose between the 'Long' and 'Short' style for the journal
name and the page numbers.
An option is provided to add or not the DOIs
"""
# Create a ComboBox to select the journal name's style
cx_journal_style = QtGui.QComboBox(self)
list_style = [" Long format", "Short format"]
cx_journal_style.addItems(list_style)
cx_journal_style.SizeAdjustPolicy(1)
# Create a ComboBox to select the page numbers' style
cx_pages_style = QtGui.QComboBox(self)
cx_pages_style.addItems(list_style)
cx_pages_style.SizeAdjustPolicy(1)
# Create a checkbox for the DOIs
self.add_doi = QtGui.QCheckBox("Add DOIs")
# Define some Logo and Labels to display information to the user
logo1 = QtGui.QLabel(self)
icon1 = QtGui.QPixmap(datadir+"/nb1.png")
logo1.setPixmap(icon1)
first_step = QtGui.QLabel("<b><font color ='darkblue'><h2> \
Define a style</b></h2>")
lab_style = QtGui.QLabel("<b>Journal</b>", self)
lab_format = QtGui.QLabel("<b>Pages</b>", self)
lab_example = QtGui.QLabel("<b>DOI</b>", self)
self.lab_empty = QtGui.QLabel("", self)
self.lab_empty.setMinimumSize(90, 25)
# Place the widgets on the grid.
self.grid.addWidget(logo1, 0, 1, 1, 2)
self.grid.addWidget(first_step, 0, 1, 1, 2)
self.grid.addWidget(lab_style, 2, 1)
self.grid.addWidget(cx_journal_style, 2, 2)
self.grid.addWidget(lab_format, 3, 1)
self.grid.addWidget(lab_example, 4, 1)
self.grid.addWidget(cx_pages_style, 3, 2)
self.grid.addWidget(self.add_doi, 4, 2)
# Control the style and the choice done
self.journal_style = "long"
self.pages_style = "long"
cx_journal_style.activated[int].connect(self.styleJournal)
cx_pages_style.activated[int].connect(self.stylePages)
self.mainWidget.connect(self.add_doi,
QtCore.SIGNAL('stateChanged(int)'), self.doi)
def doi(self):
""" Add and remove the doi fields in the list of fields to keep
"""
if self.add_doi.isChecked():
self.chx.append("doi")
else:
if "doi" in self.chx:
self.chx.remove("doi")
def styleJournal(self, style):
if style == 0:
self.journal_style = "long"
else:
self.journal_style = "short"
def stylePages(self, style):
if style == 0:
self.pages_style = "long"
else:
self.pages_style = "short"
def bibFile(self):
""" GUI section to receive a dropped file
And take it as the bib file to clean
"""
self.setAcceptDrops(True)
# Define a picture where to drop the file
self.dropIcon = QtGui.QLabel(self)
dragdrop = QtGui.QPixmap(datadir+"/drop.png")
self.dropIcon.setPixmap(dragdrop)
self.dropIcon.setAlignment(QtCore.Qt.AlignCenter)
# Define some Logo and Labels to display information to the user
logo2 = QtGui.QLabel(self)
icon2 = QtGui.QPixmap(datadir+"/nb2.png")
logo2.setPixmap(icon2)
second_step = QtGui.QLabel("<b><font color ='darkblue'><h2> \
Bibtex file</b></h2>")
lab_drop = QtGui.QLabel("<b><h3>Drop a bib file here</b></h3>", self)
lab_drop.setAlignment(QtCore.Qt.AlignCenter)
# Place the widgets on the grid
self.grid.addWidget(self.lab_empty, 2, 3, 1, 5) # Add an empty column
self.grid.addWidget(self.lab_empty, 2, 4, 1, 5) # Add an empty column
self.grid.addWidget(logo2, 0, 6, 1, 2)
self.grid.addWidget(second_step, 0, 6, 1, 2)
self.grid.addWidget(self.dropIcon, 2, 6, 2, 3)
self.grid.addWidget(lab_drop, 4, 6, 1, 3)
def dragEnterEvent(self, event):
if event.mimeData().hasUrls:
event.accept()
else:
event.ignore()
def dropEvent(self, event):
""" Extract the path of the dropped file
Call the CleanMyBib script and update the status bar
"""
for url in event.mimeData().urls():
path = url.toLocalFile().toLocal8Bit().data()
if os.path.isfile(path):
# Extract the path and open the cleaned file
rep, name = os.path.split(path)
name_bibOk = 'Cleaned_'+name
fileBibOK = open(os.path.join(rep, name_bibOk), 'w')
# Update the status bar
self.status.showMessage("File to clean: "+name, 5000)
# Prepare the fields to keep in the final file
fields = []
for item in self.chx:
fields.append(item.lower())
try:
CleanFileBib(path, fileBibOK, fields, self.journal_style, self.pages_style)
self.statusClean.setText("File cleaned successfully!")
icon4 = QtGui.QPixmap(datadir+"/success.png")
self.waitLogo.setPixmap(icon4)
self.status.showMessage("Drop another file", 5000)
except:
self.statusClean.setText("An error has occurred.\
\nPlease check your bibtex file\nand the log file")
icon5 = QtGui.QPixmap(datadir+"/error.png")
self.waitLogo.setPixmap(icon5)
fileBibOK.close()
def statusBib(self):
""" The third panel of the main frame is used to display the current
status of the file to be cleaned
"""
logo3 = QtGui.QLabel(self)
icon3 = QtGui.QPixmap(datadir+"/nb3.png")
logo3.setPixmap(icon3)
third_step = QtGui.QLabel("<b><font color ='darkblue'><h2> \
Clean my bib...</b></h2>")
self.statusClean = QtGui.QLabel("<i> <BR>Ready to receive <BR>a bibtex file</i>")
self.statusClean.setAlignment(QtCore.Qt.AlignCenter)
self.waitLogo = QtGui.QLabel(self)
self.icon4 = QtGui.QPixmap(datadir+"/wait.png")
self.waitLogo.setPixmap(self.icon4)
self.waitLogo.setAlignment(QtCore.Qt.AlignCenter)
# Display the widgets on the grid
self.grid.addWidget(logo3, 0, 12, 1, 2)
self.grid.addWidget(third_step, 0, 12, 1, 2)
self.grid.addWidget(self.statusClean, 1, 12, 2, 3)
self.grid.addWidget(self.waitLogo, 3, 12, 1, 2)
def about(self):
QtGui.QMessageBox.about(self, "About Clean My Bib",
"""<b>Clean My Bib</b> v %s
<p><b>Licence:</b> GPLv3 by GT Vallet
            <p>This application can be used to prepare a bibtex
            file, converting the journal names and page numbers into short or
            long forms.
<p>Python %s - on %s""" % (
__version__, platform.python_version(), platform.system()))
def Opts(self):
""" Option panel to add/remove key words defining the fields
to add in the cleaned bibtex file
"""
opt = QtGui.QDialog(self)
opt.setWindowTitle('Options -- Fields to keep')
self.listOpt = QtGui.QListWidget(opt)
for item in sorted(self.chx):
self.listOpt.addItem(item.capitalize())
# Define the buttons
AddBt = QtGui.QPushButton('Add', opt)
RemBt = QtGui.QPushButton('Remove', opt)
QtBt = QtGui.QPushButton('Quit', opt)
Cl_Bt = QtGui.QPushButton('Cancel', opt)
# Define the action associated to the buttons
RemBt.clicked.connect(self.RemoveField)
AddBt.clicked.connect(self.Add)
Cl_Bt.clicked.connect(opt.close)
QtBt.clicked.connect(opt.close)
QtBt.clicked.connect(self.UpList)
# Place the widgets on the grid
grid_opt = QtGui.QGridLayout()
grid_opt.addWidget(self.listOpt, 0, 0, 5, 3)
grid_opt.addWidget(AddBt, 0, 3)
grid_opt.addWidget(RemBt, 1, 3)
grid_opt.addWidget(QtBt, 5, 3)
grid_opt.addWidget(Cl_Bt, 5, 2)
# Show the option window
opt.setLayout(grid_opt)
opt.show()
def Add(self):
""" Add a new field to the list
"""
text, ok = QtGui.QInputDialog.getText(self, 'Input Dialog',
'Add a field:')
if ok:
self.listOpt.addItem(str(text))
self.listOpt.sortItems(order = QtCore.Qt.AscendingOrder)
def RemoveField(self):
""" Remove a field for the list
"""
index = self.listOpt.currentRow()
self.listOpt.takeItem(index)
self.listOpt.sortItems(order = QtCore.Qt.AscendingOrder)
def UpList(self):
""" Finally update the list of field to send back to the program
"""
self.chx = []
for index in xrange(self.listOpt.count()):
self.chx.append(str(self.listOpt.item(index).text()))
# --------------------------------------------------------------
# START THE APPLICATION
# --------------------------------------------------------------
def main():
"""Define the main application
Calling the UI
"""
app = QtGui.QApplication(sys.argv)
ex = MainWindow()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| gpl-3.0 | 7,906,700,085,643,785,000 | 37.509317 | 95 | 0.575081 | false |
KhronosGroup/COLLADA-CTS | Core/Gui/Dialog/FOpenDialog.py | 1 | 6196 | # Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
import os
import os.path
import wx
from Core.Common.FConstants import *
class FOpenDialog(wx.Dialog):
__DIALOG_TITLE = "Open Test Procedure"
def __init__(self, parent):
wx.Dialog.__init__(self, parent, wx.ID_ANY, FOpenDialog.__DIALOG_TITLE)
self.__ID_OK = wx.NewId()
self.__ID_CANCEL = wx.NewId()
        self.__ID_PROCEDURE = wx.NewId()
self.__commentsCtrl = None
self.__proceduresCtrl = None
outterSizer = wx.BoxSizer(wx.VERTICAL)
self.SetSizer(outterSizer)
procedureSizer = self.__GetProcedureSizer()
commentSizer = self.__GetCommentsSizer()
bottomSizer = self.__GetBottomSizer()
outterSizer.Add(procedureSizer, 0, wx.EXPAND | wx.ALL, 5)
outterSizer.Add(commentSizer, 0, wx.EXPAND | wx.ALL, 5)
outterSizer.Add(bottomSizer, 0, wx.ALIGN_CENTER | wx.ALL, 5)
self.Fit()
def GetPath(self):
selection = self.__proceduresCtrl.GetStringSelection()
if (selection == ""): return None
return os.path.abspath(os.path.join(
RUNS_FOLDER, self.__proceduresCtrl.GetStringSelection(),
TEST_PROCEDURE_FILENAME))
def __OnOk(self, e):
if (self.__proceduresCtrl.GetStringSelection() == ""): return
if (self.IsModal()):
self.EndModal(wx.ID_OK)
else:
self.SetReturnCode(wx.ID_OK)
self.Show(False)
def __OnCancel(self, e):
if (self.IsModal()):
self.EndModal(wx.ID_CANCEL)
else:
self.SetReturnCode(wx.ID_CANCEL)
self.Show(False)
def __OnClick(self, e):
file = os.path.join(RUNS_FOLDER,
self.__proceduresCtrl.GetStringSelection(),
TEST_PROCEDURE_COMMENTS)
comments = ""
if (os.path.isfile(file)):
f = open(file)
line = f.readline()
while (line):
comments = comments + line
line = f.readline()
f.close()
self.__commentsCtrl.SetValue(comments)
def __OnDClick(self, e):
self.__OnOk(e)
def __GetProcedureSizer(self):
"""Retuns the Sizer used to display test procedures."""
staticBox = wx.StaticBox(self, wx.ID_ANY, "Available Test Procedures")
sizer = wx.StaticBoxSizer(staticBox, wx.HORIZONTAL)
choices = []
if (os.path.isdir(RUNS_FOLDER)):
for entry in os.listdir(RUNS_FOLDER):
if (os.path.isfile(os.path.join(
RUNS_FOLDER, entry, TEST_PROCEDURE_FILENAME))):
choices.append(entry)
self.__proceduresCtrl = wx.ListBox(self, self.__ID_PROCEDURE,
size = wx.Size(300, 140), choices = choices,
style = wx.LB_SINGLE | wx.LB_SORT)
self.Bind(wx.EVT_LISTBOX, self.__OnClick, self.__proceduresCtrl,
self.__ID_PROCEDURE)
self.Bind(wx.EVT_LISTBOX_DCLICK, self.__OnDClick,
self.__proceduresCtrl, self.__ID_PROCEDURE)
sizer.Add(self.__proceduresCtrl, 1, wx.EXPAND | wx.ALL, 5)
return sizer
def __GetCommentsSizer(self):
"""Returns the Sizer used for comments."""
staticBox = wx.StaticBox(self, wx.ID_ANY, "Test Procedure Comments")
sizer = wx.StaticBoxSizer(staticBox, wx.HORIZONTAL)
self.__commentsCtrl = wx.TextCtrl(self, wx.ID_ANY, "",
size = wx.Size(300, 60),
style = wx.TE_MULTILINE | wx.TE_READONLY)
self.__commentsCtrl.SetBackgroundColour(
wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
sizer.Add(self.__commentsCtrl, 1, wx.EXPAND | wx.ALL, 5)
return sizer
def __GetBottomSizer(self):
"""Returns the Sizer used to confirm or cancel this dialog."""
okButton = wx.Button(self, self.__ID_OK, "Ok")
wx.EVT_BUTTON(self, self.__ID_OK, self.__OnOk)
cancelButton = wx.Button(self, self.__ID_CANCEL, "Cancel")
wx.EVT_BUTTON(self, self.__ID_CANCEL, self.__OnCancel)
bottomSizer = wx.BoxSizer(wx.HORIZONTAL)
bottomSizer.Add(okButton, 0, wx.ALIGN_LEFT)
bottomSizer.Add(cancelButton, 0, wx.ALIGN_RIGHT)
return bottomSizer
# Used to start up this dialog without the entire application.
##class MainFrame(wx.MDIParentFrame):
## def __init__(self, parent, id, title):
## wx.MDIParentFrame.__init__(self, parent, id, title, size = (600, 480),
## style = wx.DEFAULT_FRAME_STYLE | wx.NO_FULL_REPAINT_ON_RESIZE)
##
## dialog = FOpenDialog(self)
## if (dialog.ShowModal() == wx.ID_OK):
## print dialog.GetPath()
## print "ok"
## else:
## print "cancelled"
##
##app = wx.PySimpleApp()
##frame = MainFrame(None,-1, "Test")
##app.MainLoop()
| mit | -3,829,923,993,941,251,600 | 40.864865 | 466 | 0.598612 | false |
deepmind/distrax | distrax/_src/bijectors/tfp_compatible_bijector.py | 1 | 7577 | # Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrapper to adapt a Distrax bijector for use in TFP."""
from typing import Any, Optional
import chex
from distrax._src.bijectors import bijector
from distrax._src.utils import math
import jax
import jax.numpy as jnp
from tensorflow_probability.substrates import jax as tfp
tfb = tfp.bijectors
tfd = tfp.distributions
Array = chex.Array
Bijector = bijector.Bijector
def tfp_compatible_bijector(
base_bijector: Bijector,
name: Optional[str] = None):
"""Create a TFP-compatible bijector from a Distrax bijector.
Given a Distrax bijector, return a wrapped bijector that behaves as a TFP
bijector, to be used in TFP meta-bijectors and the TransformedDistribution.
In particular, the wrapped bijector implements the methods
`[forward|inverse]_event_ndims`, `[forward|inverse]_event_shape`,
`[forward|inverse]_event_shape_tensor`, `[forward|inverse]_log_det_jacobian`,
and the properties `[forward|inverse]_min_event_ndims`. Other attributes are
delegated to the `base_bijector`.
The methods of the resulting object do not take a `name` argument,
unlike their TFP equivalents.
The `shape` methods are implemented by tracing the `forward` and `inverse`
methods of the bijector, applied to a zero tensor of the requested dtype. If
the `forward` or `inverse` methods are not traceable or cannot be applied to a
zero tensor, then we cannot guarantee the correctness of the result.
Args:
base_bijector: A Distrax bijector.
name: The bijector name.
Returns:
An object that behaves like a TFP bijector.
"""
name_ = name
class TFPCompatibleBijector(base_bijector.__class__):
"""Class to wrap a Distrax bijector."""
def __init__(self):
self._is_injective = True
self._is_permutation = False
self._parts_interact = False
self.dtype = None
self.has_static_min_event_ndims = True
self.forward_min_event_ndims = base_bijector.event_ndims_in
self.inverse_min_event_ndims = base_bijector.event_ndims_out
def __getattr__(self, name: str):
return getattr(base_bijector, name)
def forward_and_log_det(self, x: Array) -> Array:
"""See `Bijector.forward_and_log_det`."""
return base_bijector.forward_and_log_det(x)
@property
def name(self) -> str:
"""The name of the wrapped bijector."""
return name_ or f"TFPCompatible{base_bijector.name}"
def experimental_batch_shape(self, x_event_ndims=None, y_event_ndims=None):
raise NotImplementedError()
def experimental_batch_shape_tensor(
self, x_event_ndims=None, y_event_ndims=None):
raise NotImplementedError()
def forward_dtype(self, _: jnp.dtype) -> None:
"""Returns None, making no promise regarding dtypes."""
return None
def inverse_dtype(self, _: jnp.dtype) -> None:
"""Returns None, making no promise regarding dtypes."""
return None
def forward_event_ndims(self, event_ndims: int) -> int:
"""Returns the number of event dimensions of the output of `forward`."""
extra_event_ndims = self._check_ndims(
"Forward", event_ndims, base_bijector.event_ndims_in)
return base_bijector.event_ndims_out + extra_event_ndims
def inverse_event_ndims(self, event_ndims: int) -> int:
"""Returns the number of event dimensions of the output of `inverse`."""
extra_event_ndims = self._check_ndims(
"Inverse", event_ndims, base_bijector.event_ndims_out)
return base_bijector.event_ndims_in + extra_event_ndims
def forward_event_shape(self, event_shape) -> tfp.tf2jax.TensorShape:
"""Returns the shape of the output of `forward` as a `TensorShape`."""
self._check_shape("Forward", event_shape, base_bijector.event_ndims_in)
forward_event_shape = jax.eval_shape(
base_bijector.forward, jnp.zeros(event_shape)).shape
return tfp.tf2jax.TensorShape(forward_event_shape)
def inverse_event_shape(self, event_shape) -> tfp.tf2jax.TensorShape:
"""Returns the shape of the output of `inverse` as a `TensorShape`."""
self._check_shape("Inverse", event_shape, base_bijector.event_ndims_out)
inverse_event_shape = jax.eval_shape(
base_bijector.inverse, jnp.zeros(event_shape)).shape
return tfp.tf2jax.TensorShape(inverse_event_shape)
def forward_event_shape_tensor(self, event_shape) -> Array:
"""Returns the shape of the output of `forward` as a `jnp.array`."""
self._check_shape("Forward", event_shape, base_bijector.event_ndims_in)
forward_event_shape = jax.eval_shape(
base_bijector.forward, jnp.zeros(event_shape)).shape
return jnp.array(forward_event_shape, dtype=jnp.int32)
def inverse_event_shape_tensor(self, event_shape) -> Array:
"""Returns the shape of the output of `inverse` as a `jnp.array`."""
self._check_shape("Inverse", event_shape, base_bijector.event_ndims_out)
inverse_event_shape = jax.eval_shape(
base_bijector.inverse, jnp.zeros(event_shape)).shape
return jnp.array(inverse_event_shape, dtype=jnp.int32)
def forward_log_det_jacobian(
self, x: Array, event_ndims: Optional[int] = None) -> Array:
"""See `Bijector.forward_log_det_jacobian`."""
extra_event_ndims = self._check_ndims(
"Forward", event_ndims, base_bijector.event_ndims_in)
fldj = base_bijector.forward_log_det_jacobian(x)
return math.sum_last(fldj, extra_event_ndims)
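# Note: when the caller passes an event_ndims larger than the base
# bijector's event_ndims_in (e.g. event_ndims=1 for a scalar bijector),
# the per-element log-det is summed over its trailing extra axes here,
# which matches TFP's reduction semantics.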
def inverse_log_det_jacobian(
self, y: Array, event_ndims: Optional[int] = None) -> Array:
"""See `Bijector.inverse_log_det_jacobian`."""
extra_event_ndims = self._check_ndims(
"Inverse", event_ndims, base_bijector.event_ndims_out)
ildj = base_bijector.inverse_log_det_jacobian(y)
return math.sum_last(ildj, extra_event_ndims)
def _check_ndims(
self, direction: str, event_ndims: int, expected_ndims: int) -> int:
"""Checks that `event_ndims` are correct and returns any extra ndims."""
if event_ndims is not None and event_ndims < expected_ndims:
raise ValueError(f"{direction} `event_ndims` of {self.name} must be at "
f"least {expected_ndims} but was passed {event_ndims} "
f"instead.")
return 0 if event_ndims is None else event_ndims - expected_ndims
def _check_shape(
self, direction: str, event_shape: Any, expected_ndims: int):
"""Checks that `event_shape` is correct, raising ValueError otherwise."""
if len(event_shape) < expected_ndims:
raise ValueError(f"{direction} `event_shape` of {self.name} must have "
f"at least {expected_ndims} dimensions, but was "
f"{event_shape} which has only {len(event_shape)} "
f"dimensions instead.")
return TFPCompatibleBijector()
| apache-2.0 | 1,191,359,019,594,068,500 | 41.567416 | 80 | 0.674145 | false |
uwosh/uwosh.dropcard | uwosh/dropcard/__init__.py | 1 | 2069 | """Main product initializer
"""
from zope.i18nmessageid import MessageFactory
from uwosh.dropcard import config
from Products.Archetypes import atapi
from Products.CMFCore import utils
# Define a message factory for when this product is internationalised.
# This will be imported with the special name "_" in most modules. Strings
# like _(u"message") will then be extracted by i18n tools for translation.
dropcardMessageFactory = MessageFactory('uwosh.dropcard')
def initialize(context):
"""Initializer called when used as a Zope 2 product.
This is referenced from configure.zcml. Regstrations as a "Zope 2 product"
is necessary for GenericSetup profiles to work, for example.
Here, we call the Archetypes machinery to register our content types
with Zope and the CMF.
"""
# Retrieve the content types that have been registered with Archetypes
# This happens when the content type is imported and the registerType()
# call in the content type's module is invoked. Actually, this happens
# during ZCML processing, but we do it here again to be explicit. Of
# course, even if we import the module several times, it is only run
# once.
content_types, constructors, ftis = atapi.process_types(
atapi.listTypes(config.PROJECTNAME),
config.PROJECTNAME)
# Now initialize all these content types. The initialization process takes
# care of registering low-level Zope 2 factories, including the relevant
# add-permission. These are listed in config.py. We use different
# permissions for each content type to allow maximum flexibility of who
# can add which content types, where. The roles are set up in rolemap.xml
# in the GenericSetup profile.
for atype, constructor in zip(content_types, constructors):
utils.ContentInit('%s: %s' % (config.PROJECTNAME, atype.portal_type),
content_types=(atype, ),
permission=config.ADD_PERMISSIONS[atype.portal_type],
extra_constructors=(constructor,),
).initialize(context)
| gpl-2.0 | -6,773,124,266,639,195,000 | 40.38 | 78 | 0.727888 | false |
all-of-us/raw-data-repository | tests/api_tests/test_physical_measurements_api.py | 1 | 26317 | import datetime
import http.client
import json
from rdr_service import main
from rdr_service.clock import FakeClock
from rdr_service.dao.participant_dao import ParticipantDao
from rdr_service.dao.physical_measurements_dao import PhysicalMeasurementsDao
from rdr_service.model.measurements import Measurement
from rdr_service.model.utils import from_client_participant_id
from rdr_service.participant_enums import UNSET_HPO_ID
from tests.test_data import data_path, load_measurement_json, load_measurement_json_amendment
from tests.helpers.unittest_base import BaseTestCase
class PhysicalMeasurementsApiTest(BaseTestCase):
def setUp(self):
super(PhysicalMeasurementsApiTest, self).setUp()
self.participant_id = self.create_participant()
self.participant_id_2 = self.create_participant()
self.time1 = datetime.datetime(2018, 1, 1)
self.time2 = datetime.datetime(2018, 2, 2)
def _insert_measurements(self, now=None):
measurements_1 = load_measurement_json(self.participant_id, now)
measurements_2 = load_measurement_json(self.participant_id_2, now)
path_1 = "Participant/%s/PhysicalMeasurements" % self.participant_id
path_2 = "Participant/%s/PhysicalMeasurements" % self.participant_id_2
self.send_post(path_1, measurements_1)
self.send_post(path_2, measurements_2)
def test_insert_before_consent_fails(self):
measurements_1 = load_measurement_json(self.participant_id)
path_1 = "Participant/%s/PhysicalMeasurements" % self.participant_id
self.send_post(path_1, measurements_1, expected_status=http.client.BAD_REQUEST)
def test_insert(self):
self.send_consent(self.participant_id)
self.send_consent(self.participant_id_2)
# now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
now = self.time1
self._insert_measurements(now.isoformat())
response = self.send_get("Participant/%s/PhysicalMeasurements" % self.participant_id)
self.assertEqual("Bundle", response["resourceType"])
self.assertEqual("searchset", response["type"])
self.assertFalse(response.get("link"))
self.assertTrue(response.get("entry"))
self.assertEqual(1, len(response["entry"]))
physical_measurements_id = response["entry"][0]["resource"]["id"]
pm_id = int(physical_measurements_id)
physical_measurements = PhysicalMeasurementsDao().get_with_children(physical_measurements_id)
self.assertEqual(physical_measurements.createdSiteId, 1)
self.assertIsNone(physical_measurements.createdUsername)
self.assertEqual(physical_measurements.finalizedSiteId, 2)
self.assertIsNone(physical_measurements.finalizedUsername)
em1 = Measurement(
measurementId=pm_id * 1000,
physicalMeasurementsId=pm_id,
codeSystem="http://terminology.pmi-ops.org/CodeSystem/physical-measurements",
codeValue="systolic-diastolic-blood-pressure-1",
measurementTime=now,
bodySiteCodeSystem="http://snomed.info/sct",
bodySiteCodeValue="368209003",
)
bp1 = Measurement(
measurementId=pm_id * 1000 + 1,
physicalMeasurementsId=pm_id,
codeSystem="http://terminology.pmi-ops.org/CodeSystem/physical-measurements",
codeValue="systolic-blood-pressure-1",
measurementTime=now,
valueDecimal=109.0,
valueUnit="mm[Hg]",
parentId=em1.measurementId,
)
bp2 = Measurement(
measurementId=pm_id * 1000 + 2,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="8462-4",
measurementTime=now,
valueDecimal=44.0,
valueUnit="mm[Hg]",
parentId=em1.measurementId,
)
m1 = physical_measurements.measurements[0]
self.assertEqual(em1.asdict(), m1.asdict())
self.assertEqual(2, len(m1.measurements))
self.assertEqual(bp1.asdict(), m1.measurements[0].asdict())
self.assertEqual(bp2.asdict(), m1.measurements[1].asdict())
response = self.send_get("Participant/%s/PhysicalMeasurements" % self.participant_id_2)
self.assertEqual("Bundle", response["resourceType"])
self.assertEqual("searchset", response["type"])
self.assertFalse(response.get("link"))
self.assertTrue(response.get("entry"))
self.assertEqual(1, len(response["entry"]))
def test_insert_and_amend(self):
self.send_consent(self.participant_id)
measurements_1 = load_measurement_json(self.participant_id)
path_1 = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path_1, measurements_1)
measurements_2 = load_measurement_json_amendment(self.participant_id, response["id"])
self.send_post(path_1, measurements_2)
response = self.send_get("Participant/%s/PhysicalMeasurements" % self.participant_id)
self.assertEqual(2, len(response["entry"]))
self.assertEqual("amended", response["entry"][0]["resource"]["entry"][0]["resource"]["status"])
def test_insert_with_qualifiers(self):
self.send_consent(self.participant_id)
# now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
now = self.time2
with open(data_path("physical_measurements_2.json")) as measurements_file:
json_text = measurements_file.read() % {
"participant_id": self.participant_id,
"authored_time": now.isoformat(),
}
measurements_json = json.loads(json_text)
path_1 = "Participant/%s/PhysicalMeasurements" % self.participant_id
self.send_post(path_1, measurements_json)
response = self.send_get("Participant/%s/PhysicalMeasurements" % self.participant_id)
self.assertEqual("Bundle", response["resourceType"])
self.assertEqual("searchset", response["type"])
self.assertFalse(response.get("link"))
self.assertTrue(response.get("entry"))
self.assertEqual(1, len(response["entry"]))
physical_measurements_id = response["entry"][0]["resource"]["id"]
pm_id = int(physical_measurements_id)
physical_measurements = PhysicalMeasurementsDao().get_with_children(physical_measurements_id)
self.assertEqual(physical_measurements.finalizedSiteId, 1)
self.assertEqual("[email protected]", physical_measurements.finalizedUsername)
# Site not present in DB, so we don't set it.
self.assertIsNone(physical_measurements.createdSiteId)
self.assertEqual("[email protected]", physical_measurements.createdUsername)
em1_id = pm_id * 1000
bp1 = Measurement(
measurementId=pm_id * 1000 + 1,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="8480-6",
measurementTime=now,
valueDecimal=109.0,
valueUnit="mm[Hg]",
parentId=em1_id,
)
bp2 = Measurement(
measurementId=pm_id * 1000 + 2,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="8462-4",
measurementTime=now,
valueDecimal=44.0,
valueUnit="mm[Hg]",
parentId=em1_id,
)
bp3 = Measurement(
measurementId=pm_id * 1000 + 3,
physicalMeasurementsId=pm_id,
codeSystem="http://terminology.pmi-ops.org/CodeSystem/physical-measurements",
codeValue="arm-circumference",
measurementTime=now,
valueDecimal=32.0,
valueUnit="cm",
parentId=em1_id,
)
em1 = Measurement(
measurementId=pm_id * 1000,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="55284-4",
measurementTime=now,
bodySiteCodeSystem="http://snomed.info/sct",
bodySiteCodeValue="368209003",
measurements=[bp1, bp2, bp3],
)
pm_height_system = "http://terminology.pmi-ops.org/CodeSystem/protocol-modifications-height"
q1 = Measurement(
measurementId=pm_id * 1000 + 4,
physicalMeasurementsId=pm_id,
codeSystem="http://terminology.pmi-ops.org/CodeSystem/physical-measurements",
codeValue="protocol-modifications-height",
measurementTime=now,
valueCodeSystem=pm_height_system,
valueCodeValue="hair-style",
valueCodeDescription="Hair style"
)
em2 = Measurement(
measurementId=pm_id * 1000 + 5,
physicalMeasurementsId=pm_id,
codeSystem="http://terminology.pmi-ops.org/CodeSystem/physical-measurements",
codeValue="pre-pregnancy-weight",
measurementTime=now,
valueDecimal=28.0,
valueUnit="kg",
)
pm_weight_system = "http://terminology.pmi-ops.org/CodeSystem/protocol-modifications-weight"
q2 = Measurement(
measurementId=pm_id * 1000 + 6,
physicalMeasurementsId=pm_id,
codeSystem="http://terminology.pmi-ops.org/CodeSystem/physical-measurements",
codeValue="protocol-modifications-weight",
measurementTime=now,
valueCodeSystem=pm_weight_system,
valueCodeValue="other",
valueCodeDescription="Participant could not remove boots weight 20 pounds"
)
em3 = Measurement(
measurementId=pm_id * 1000 + 7,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="39156-5",
measurementTime=now,
valueDecimal=24.2,
valueUnit="kg/m2",
)
# Skip a bunch -- could add these later
em4 = Measurement(
measurementId=pm_id * 1000 + 14,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="8302-2",
measurementTime=now,
valueDecimal=111.5,
valueUnit="cm",
qualifiers=[q1],
)
em5 = Measurement(
measurementId=pm_id * 1000 + 15,
physicalMeasurementsId=pm_id,
codeSystem="http://loinc.org",
codeValue="29463-7",
measurementTime=now,
valueDecimal=30.1,
valueUnit="kg",
qualifiers=[q2],
)
m = {
measurement.measurementId: measurement.asdict(follow={"measurements": {}, "qualifiers": {}})
for measurement in physical_measurements.measurements
}
for em in [em1, bp1, bp2, bp3, q1, em2, q2, em3, em4, em5]:
self.assertEqual(em.asdict(follow={"measurements": {}, "qualifiers": {}}), m.get(em.measurementId))
def test_physical_measurements_sync(self):
self.send_consent(self.participant_id)
self.send_consent(self.participant_id_2)
sync_response = self.send_get("PhysicalMeasurements/_history")
self.assertEqual("Bundle", sync_response["resourceType"])
self.assertEqual("history", sync_response["type"])
link = sync_response.get("link")
self.assertIsNone(link)
self.assertTrue(len(sync_response["entry"]) == 0)
self._insert_measurements()
sync_response = self.send_get("PhysicalMeasurements/_history?_count=1")
self.assertTrue(sync_response.get("entry"))
link = sync_response.get("link")
self.assertTrue(link)
self.assertEqual("next", link[0]["relation"])
self.assertEqual(1, len(sync_response["entry"]))
prefix_index = link[0]["url"].index(main.API_PREFIX)
relative_url = link[0]["url"][prefix_index + len(main.API_PREFIX):]
sync_response_2 = self.send_get(relative_url)
self.assertEqual(1, len(sync_response_2["entry"]))
self.assertNotEqual(sync_response["entry"][0], sync_response_2["entry"][0])
def test_auto_pair_called(self):
pid_numeric = from_client_participant_id(self.participant_id)
participant_dao = ParticipantDao()
self.send_consent(self.participant_id)
self.send_consent(self.participant_id_2)
self.assertEqual(participant_dao.get(pid_numeric).hpoId, UNSET_HPO_ID)
self._insert_measurements(datetime.datetime.utcnow().isoformat())
self.assertNotEqual(participant_dao.get(pid_numeric).hpoId, UNSET_HPO_ID)
def get_composition_resource_from_fhir_doc(self, data):
"""
Return the Composition object from the PM Bundle
:param data: dict
:return: dict
"""
for entry in data['entry']:
if entry['resource'].get('resourceType', '') == 'Composition':
return entry['resource']
return None
def test_cancel_a_physical_measurement(self):
_id = self.participant_id.strip("P")
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
measurement2 = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurement)
# send another PM
self.send_post(path, measurement2)
path = path + "/" + response["id"]
cancel_info = BaseTestCase.get_restore_or_cancel_info()
ps = self.send_get("ParticipantSummary?participantId=%s" % _id)
self.send_patch(path, cancel_info)
response = self.send_get(path)
composition = self.get_composition_resource_from_fhir_doc(response)
self.assertEqual(composition["status"], "entered-in-error")
count = 0
for ext in composition['extension']:
if 'cancelled-site' in ext['url']:
self.assertEqual(ext['valueInteger'], 1)
count += 1
if 'cancelled-username' in ext['url']:
self.assertEqual(ext['valueString'], '[email protected]')
count += 1
if 'cancelled-reason' in ext['url']:
self.assertEqual(ext['valueString'], 'a mistake was made.')
count += 1
if 'authored-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-monroeville')
count += 1
if 'finalized-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-bannerphoenix')
count += 1
self.assertEqual(count, 5)
ps = self.send_get("ParticipantSummary?participantId=%s" % _id)
# should be completed because of other valid PM
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsStatus"], "COMPLETED")
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsCreatedSite"], "hpo-site-monroeville")
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsFinalizedSite"], "hpo-site-bannerphoenix")
def test_make_pm_after_cancel_first_pm(self):
_id = self.participant_id.strip("P")
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
measurement2 = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurement)
path = path + "/" + response["id"]
cancel_info = BaseTestCase.get_restore_or_cancel_info()
self.send_patch(path, cancel_info)
# send another PM
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
self.send_post(path, measurement2)
path = path + "/" + response["id"]
response = self.send_get(path)
composition = self.get_composition_resource_from_fhir_doc(response)
self.assertEqual(composition["status"], "entered-in-error")
count = 0
for ext in composition['extension']:
if 'cancelled-site' in ext['url']:
self.assertEqual(ext['valueInteger'], 1)
count += 1
if 'cancelled-username' in ext['url']:
self.assertEqual(ext['valueString'], '[email protected]')
count += 1
if 'cancelled-reason' in ext['url']:
self.assertEqual(ext['valueString'], 'a mistake was made.')
count += 1
if 'authored-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-monroeville')
count += 1
if 'finalized-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-bannerphoenix')
count += 1
self.assertEqual(count, 5)
ps = self.send_get("ParticipantSummary?participantId=%s" % _id)
# should be completed because of other valid PM
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsStatus"], "COMPLETED")
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsCreatedSite"], "hpo-site-monroeville")
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsFinalizedSite"], "hpo-site-bannerphoenix")
self.assertIsNotNone(ps["entry"][0]["resource"]["physicalMeasurementsTime"])
def test_make_pm_after_cancel_latest_pm(self):
_id = self.participant_id.strip("P")
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
measurement2 = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
with FakeClock(self.time1):
self.send_post(path, measurement)
with FakeClock(self.time2):
response2 = self.send_post(path, measurement2)
# cancel latest PM
path = path + "/" + response2["id"]
cancel_info = BaseTestCase.get_restore_or_cancel_info()
self.send_patch(path, cancel_info)
response = self.send_get(path)
composition = self.get_composition_resource_from_fhir_doc(response)
self.assertEqual(composition["status"], "entered-in-error")
count = 0
for ext in composition['extension']:
if 'cancelled-site' in ext['url']:
self.assertEqual(ext['valueInteger'], 1)
count += 1
if 'cancelled-username' in ext['url']:
self.assertEqual(ext['valueString'], '[email protected]')
count += 1
if 'cancelled-reason' in ext['url']:
self.assertEqual(ext['valueString'], 'a mistake was made.')
count += 1
if 'authored-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-monroeville')
count += 1
if 'finalized-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-bannerphoenix')
count += 1
self.assertEqual(count, 5)
ps = self.send_get("ParticipantSummary?participantId=%s" % _id)
# should still get first PM in participant summary
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsStatus"], "COMPLETED")
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsCreatedSite"], "hpo-site-monroeville")
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsFinalizedSite"], "hpo-site-bannerphoenix")
self.assertIsNotNone(ps["entry"][0]["resource"]["physicalMeasurementsTime"])
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsTime"], self.time1.isoformat())
def test_cancel_single_pm_returns_cancelled_in_summary(self):
_id = self.participant_id.strip("P")
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurement)
path = path + "/" + response["id"]
cancel_info = BaseTestCase.get_restore_or_cancel_info()
self.send_patch(path, cancel_info)
response = self.send_get(path)
composition = self.get_composition_resource_from_fhir_doc(response)
self.assertEqual(composition["status"], "entered-in-error")
count = 0
for ext in composition['extension']:
if 'cancelled-site' in ext['url']:
self.assertEqual(ext['valueInteger'], 1)
count += 1
if 'cancelled-username' in ext['url']:
self.assertEqual(ext['valueString'], '[email protected]')
count += 1
if 'cancelled-reason' in ext['url']:
self.assertEqual(ext['valueString'], 'a mistake was made.')
count += 1
if 'authored-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-monroeville')
count += 1
if 'finalized-location' in ext['url']:
self.assertEqual(ext['valueString'], 'Location/hpo-site-bannerphoenix')
count += 1
self.assertEqual(count, 5)
ps = self.send_get("ParticipantSummary?participantId=%s" % _id)
self.assertEqual(ps["entry"][0]["resource"]["physicalMeasurementsStatus"], "CANCELLED")
self.assertNotIn("physicalMeasurementsTime", ps["entry"][0]["resource"])
self.assertNotIn("physicalMeasurementsFinalizedSiteId", ps["entry"][0]["resource"])
self.assertEqual("UNSET", ps["entry"][0]["resource"]["physicalMeasurementsFinalizedSite"])
def test_restore_a_physical_measurement(self):
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurement)
ps = self.send_get("Participant/%s/Summary" % self.participant_id)
path = path + "/" + response["id"]
self.send_patch(path, BaseTestCase.get_restore_or_cancel_info())
ps_2 = self.send_get("Participant/%s/Summary" % self.participant_id)
self.assertTrue("physicalMeasurementsFinalizedTime" not in ps_2)
restored_info = BaseTestCase.get_restore_or_cancel_info(reason="need to restore", status="restored", author="me")
self.send_patch(path, restored_info)
response = self.send_get(path)
composition = self.get_composition_resource_from_fhir_doc(response)
self.assertEqual(composition["status"], "final")
count = 0
cancelled = 0
for ext in composition['extension']:
if 'restore-site' in ext['url']:
self.assertEqual(ext['valueInteger'], 1)
count += 1
if 'restore-username' in ext['url']:
self.assertEqual(ext['valueString'], 'me')
count += 1
if 'restore-reason' in ext['url']:
self.assertEqual(ext['valueString'], 'need to restore')
count += 1
if 'cancelled' in ext['url']:
cancelled += 1
self.assertEqual(count, 3)
# Response should not contain cancelledInfo
self.assertEqual(cancelled, 0)
ps_3 = self.send_get("Participant/%s/Summary" % self.participant_id)
self.assertEqual(ps_3['physicalMeasurementsFinalizedTime'], ps['physicalMeasurementsFinalizedTime'])
def test_cannot_restore_a_valid_pm(self):
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurement)
path = path + "/" + response["id"]
restored_info = BaseTestCase.get_restore_or_cancel_info(reason="need to restore", status="restored", author="me")
self.send_patch(path, restored_info, expected_status=http.client.BAD_REQUEST)
def test_cannot_cancel_a_cancelled_pm(self):
self.send_consent(self.participant_id)
measurement = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurement)
path = path + "/" + response["id"]
self.send_patch(path, BaseTestCase.get_restore_or_cancel_info())
self.send_patch(path, BaseTestCase.get_restore_or_cancel_info(), expected_status=http.client.BAD_REQUEST)
def test_cancel_an_amended_order(self):
self.send_consent(self.participant_id)
measurements_1 = load_measurement_json(self.participant_id)
path = "Participant/%s/PhysicalMeasurements" % self.participant_id
response = self.send_post(path, measurements_1)
measurements_2 = load_measurement_json_amendment(self.participant_id, response["id"])
response_2 = self.send_post(path, measurements_2)
path = path + "/" + response_2["id"]
cancel_info = BaseTestCase.get_restore_or_cancel_info()
self.send_patch(path, cancel_info)
response = self.send_get(path)
composition = self.get_composition_resource_from_fhir_doc(response)
self.assertEqual(composition["status"], "entered-in-error")
count = 0
for ext in composition['extension']:
if 'cancelled-site' in ext['url']:
self.assertEqual(ext['valueInteger'], 1)
count += 1
if 'cancelled-username' in ext['url']:
self.assertEqual(ext['valueString'], '[email protected]')
count += 1
if 'cancelled-reason' in ext['url']:
self.assertEqual(ext['valueString'], 'a mistake was made.')
count += 1
if 'amends' in ext['url']:
self.assertIn('PhysicalMeasurements/', ext['valueReference']['reference'])
count += 1
self.assertEqual(count, 4)
| bsd-3-clause | -5,143,977,278,125,152,000 | 46.418018 | 121 | 0.620473 | false |
edf-hpc/hpcstats | HPCStats/Model/Cluster.py | 1 | 6981 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2015 EDF SA
# Contact:
# CCN - HPC <[email protected]>
# 1, Avenue du General de Gaulle
# 92140 Clamart
#
# Authors: CCN - HPC <[email protected]>
#
# This file is part of HPCStats.
#
# HPCStats is free software: you can redistribute in and/or
# modify it under the terms of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with HPCStats. If not, see
# <http://www.gnu.org/licenses/>.
#
# On Calibre systems, the complete text of the GNU General
# Public License can be found in `/usr/share/common-licenses/GPL'.
"""
Schema of the ``Cluster`` table in HPCStats database:
.. code-block:: sql
Cluster(
cluster_id SERIAL,
cluster_name character varying(30) NOT NULL,
CONSTRAINT Cluster_pkey PRIMARY KEY (cluster_id),
CONSTRAINT Cluster_unique UNIQUE (cluster_name)
)
"""
import logging
logger = logging.getLogger(__name__)
from HPCStats.Exceptions import HPCStatsDBIntegrityError, HPCStatsRuntimeError
class Cluster(object):
"""Model class for Cluster table"""
def __init__(self, name, cluster_id=None):
self.cluster_id = cluster_id
self.name = name
def __str__(self):
return self.name
def __eq__(self, other):
return self.name == other.name
def find(self, db):
"""Search the Cluster in the database based on its name. If exactly
one cluster matches in database, set cluster_id attribute properly
and returns its value. If more than one cluster matches, raises
HPCStatsDBIntegrityError. If no cluster is found, returns None.
"""
req = """
SELECT cluster_id
FROM Cluster
WHERE cluster_name = %s
"""
params = ( self.name, )
db.execute(req, params)
nb_rows = db.cur.rowcount
if nb_rows == 0:
logger.debug("cluster %s not found in DB", str(self))
return None
elif nb_rows > 1:
raise HPCStatsDBIntegrityError(
"several cluster_id found in DB for cluster %s" \
% (str(self)))
else:
self.cluster_id = db.cur.fetchone()[0]
logger.debug("cluster %s found in DB with id %d",
str(self),
self.cluster_id )
return self.cluster_id
def save(self, db):
"""Insert Cluster in database. You must make sure that the Cluster does
not already exist in database yet (typically using Cluster.find()
method else there is a risk of future integrity errors because of
duplicated clusters. If cluster_id attribute is set, it raises
HPCStatsRuntimeError.
"""
if self.cluster_id is not None:
raise HPCStatsRuntimeError(
"could not insert cluster %s since already existing in "\
"database" \
% (str(self)))
req = """
INSERT INTO Cluster ( cluster_name )
VALUES ( %s )
RETURNING cluster_id
"""
params = ( self.name, )
#print db.cur.mogrify(req, params)
db.execute(req, params)
self.cluster_id = db.cur.fetchone()[0]
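# Typical find-or-save pattern (a sketch; 'curie' is a hypothetical
# cluster name and `db` is assumed to be the HPCStats database wrapper
# exposing execute() and cur, as used above):
#   cluster = Cluster('curie')
#   if cluster.find(db) is None:
#       cluster.save(db)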
def get_nb_cpus(self, db):
"""Returns the total number of CPUs available on the cluster"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT SUM(node_nbCpu)
FROM Node
WHERE cluster_id = %s
"""
params = ( self.cluster_id, )
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
def get_min_datetime(self, db):
"""Returns the start datetime of the oldest started and unfinished
job on the cluster.
"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT MIN(job_start)
FROM Job
WHERE cluster_id = %s
AND job_state NOT IN ('CANCELLED', 'NODE_FAIL', 'PENDING')
"""
params = ( self.cluster_id, )
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
def get_nb_accounts(self, db, creation_date):
"""Returns the total of users on the cluster whose account have been
created defore date given in parameter.
"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT COUNT (userhpc_id)
FROM Userhpc,
Account
WHERE Account.userhpc_id = Userhpc.userhpc_id
AND Account.account_creation < %s
AND Account.cluster_id = %s
"""
params = (creation_date, self.cluster_id )
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
def get_nb_active_users(self, db, start, end):
"""Returns the total number of users who have run job(s) on the cluster
between start and end datetimes in parameters.
"""
if self.cluster_id is None:
raise HPCStatsRuntimeError(
"could not search for data with cluster %s since not " \
"found in database" \
% (str(self)))
req = """
SELECT COUNT(DISTINCT userhpc_id)
FROM Job
WHERE Job.cluster_id = %s
AND ((job_start BETWEEN %s AND %s)
OR (job_end BETWEEN %s AND %s)
OR (job_start <= %s AND job_end >= %s))
"""
params = (self.cluster_id, start, end, start, end, start, end)
#print db.cur.mogrify(req, params)
db.execute(req, params)
return db.cur.fetchone()[0]
| gpl-2.0 | -7,844,108,807,252,249,000 | 32.085308 | 79 | 0.545481 | false |
MangoMangoDevelopment/neptune | lib/ros_comm-1.12.0/tools/rosgraph/test/test_names.py | 2 | 11745 | # Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
def test_get_ros_namespace():
if 'ROS_NAMESPACE' in os.environ:
rosns = os.environ['ROS_NAMESPACE']
del os.environ['ROS_NAMESPACE']
else:
rosns = None
sysargv = sys.argv
from rosgraph.names import get_ros_namespace
try:
sys.argv = []
assert '/' == get_ros_namespace()
assert '/' == get_ros_namespace(argv=[])
assert '/' == get_ros_namespace(env={})
assert '/' == get_ros_namespace(env={}, argv=[])
os.environ['ROS_NAMESPACE'] = 'unresolved'
assert '/unresolved/' == get_ros_namespace()
assert '/unresolved/' == get_ros_namespace(env={'ROS_NAMESPACE': 'unresolved'})
sys.argv = ['foo', '__ns:=unresolved_override']
assert '/unresolved_override/' == get_ros_namespace(env={'ROS_NAMESPACE': 'unresolved'})
assert '/override2/' == get_ros_namespace(env={'ROS_NAMESPACE': 'unresolved'}, argv=['foo', '__ns:=override2'])
sys.argv = []
os.environ['ROS_NAMESPACE'] = '/resolved/'
assert '/resolved/' == get_ros_namespace()
assert '/resolved/' == get_ros_namespace(env={'ROS_NAMESPACE': '/resolved'})
del os.environ['ROS_NAMESPACE']
sys.argv = ['foo', '__ns:=unresolved_ns']
assert '/unresolved_ns/' == get_ros_namespace()
assert '/unresolved_ns2/' == get_ros_namespace(argv=['foo', '__ns:=unresolved_ns2'])
sys.argv = ['foo', '__ns:=/resolved_ns/']
assert '/resolved_ns/' == get_ros_namespace()
assert '/resolved_ns2/' == get_ros_namespace(argv=['foo', '__ns:=resolved_ns2'])
finally:
sys.argv = sysargv
# restore
if rosns:
os.environ['ROS_NAMESPACE'] = rosns
def test_make_global_ns():
from rosgraph.names import make_global_ns
for n in ['~foo']:
try:
make_global_ns(n)
assert False, "make_global_ns should fail on %s"%n
except ValueError: pass
assert '/foo/' == make_global_ns('foo')
assert '/' == make_global_ns('')
assert '/foo/' == make_global_ns('/foo')
assert '/foo/' == make_global_ns('/foo/')
assert '/foo/bar/' == make_global_ns('/foo/bar')
assert '/foo/bar/' == make_global_ns('/foo/bar/')
def test_make_caller_id():
from rosgraph.names import make_caller_id
if 'ROS_NAMESPACE' in os.environ:
rosns = os.environ['ROS_NAMESPACE']
del os.environ['ROS_NAMESPACE']
else:
rosns = None
for n in ['~name']:
try:
make_caller_id('~name') # illegal
assert False, "make_caller_id should fail on %s"%n
except ValueError:
pass
assert '/node/' == make_caller_id('node')
assert '/bar/node/' == make_caller_id('bar/node')
assert '/bar/node/' == make_caller_id('/bar/node')
os.environ['ROS_NAMESPACE'] = '/test/'
assert '/test/node/' == make_caller_id('node')
assert '/test/bar/node/' == make_caller_id('bar/node')
assert '/bar/node/' == make_caller_id('/bar/node')
# restore
if rosns:
os.environ['ROS_NAMESPACE'] = rosns
def test_is_global():
from rosgraph.names import is_global
try:
is_global(None)
assert False, "is_global should raise exception on invalid param"
except: pass
tests = ['/', '/global', '/global2']
for t in tests:
assert is_global(t)
fails = ['', 'not_global', 'not/global']
for t in fails:
assert not is_global(t)
def test_is_private():
from rosgraph.names import is_private
try:
is_private(None)
assert False, "is_private should raise exception on invalid param"
except: pass
tests = ['~name', '~name/sub']
for t in tests:
assert is_private(t)
fails = ['', 'not_private', 'not/private', 'not/~private', '/not/~private']
for t in fails:
assert not is_private(t)
def test_namespace():
from rosgraph.names import namespace
try:
namespace(1)
assert False, "1"
except TypeError: pass
try:
namespace(None)
assert False, "None"
except ValueError: pass
assert '/'== namespace('')
assert '/'== namespace('/')
assert '/'== namespace('/foo')
assert '/'== namespace('/foo/')
assert '/foo/'== namespace('/foo/bar')
assert '/foo/'== namespace('/foo/bar/')
assert '/foo/bar/'== namespace('/foo/bar/baz')
assert '/foo/bar/'== namespace('/foo/bar/baz/')
# unicode tests
assert u'/'== namespace(u'')
assert u'/'== namespace(u'/')
assert u'/foo/bar/'== namespace(u'/foo/bar/baz/')
def test_nsjoin():
from rosgraph.names import ns_join
# private and global names cannot be joined
assert '~name' == ns_join('/foo', '~name')
assert '/name' == ns_join('/foo', '/name')
assert '~name' == ns_join('~', '~name')
assert '/name' == ns_join('/', '/name')
# ns can be '~' or '/'
assert '~name' == ns_join('~', 'name')
assert '/name' == ns_join('/', 'name')
assert '/ns/name' == ns_join('/ns', 'name')
assert '/ns/name' == ns_join('/ns/', 'name')
assert '/ns/ns2/name' == ns_join('/ns', 'ns2/name')
assert '/ns/ns2/name' == ns_join('/ns/', 'ns2/name')
# allow ns to be empty
assert 'name' == ns_join('', 'name')
def test_load_mappings():
from rosgraph.names import load_mappings
assert {} == load_mappings([])
assert {} == load_mappings(['foo'])
assert {} == load_mappings([':='])
assert {} == load_mappings([':=:='])
assert {} == load_mappings(['f:='])
assert {} == load_mappings([':=b'])
assert {} == load_mappings(['foo:=bar:=baz'])
# should ignore node param assignments
assert {} == load_mappings(['_foo:=bar'])
assert {'foo': 'bar'} == load_mappings(['foo:=bar'])
# should allow double-underscore names
assert {'__foo': 'bar'} == load_mappings(['__foo:=bar'])
assert {'foo': 'bar'} == load_mappings(['./f', '-x', '--blah', 'foo:=bar'])
assert {'a': '1', 'b': '2', 'c': '3'} == load_mappings(['c:=3', 'c:=', ':=3', 'a:=1', 'b:=2'])
def test_is_legal_name():
from rosgraph.names import is_legal_name
failures = [None,
'foo++', 'foo-bar', '#foo',
'hello\n', '\t', ' name', 'name ',
'f//b',
'1name', 'foo\\']
for f in failures:
assert not is_legal_name(f), f
tests = ['',
'f', 'f1', 'f_', 'f/', 'foo', 'foo_bar', 'foo/bar', 'foo/bar/baz',
'~f', '~a/b/c',
'~/f',
'/a/b/c/d', '/']
for t in tests:
assert is_legal_name(t), "[%s]"%t
def test_is_legal_base_name():
from rosgraph.names import is_legal_base_name
failures = [None, '', 'hello\n', '\t', 'foo++', 'foo-bar', '#foo',
'f/', 'foo/bar', '/', '/a',
'f//b',
'~f', '~a/b/c',
' name', 'name ',
'1name', 'foo\\']
for f in failures:
assert not is_legal_base_name(f), f
tests = ['f', 'f1', 'f_', 'foo', 'foo_bar']
for t in tests:
assert is_legal_base_name(t), "[%s]"%t
def test_resolve_name():
from rosgraph.names import resolve_name
# TODO: test with remappings
tests = [
('', '/', '/'),
('', '/node', '/'),
('', '/ns1/node', '/ns1/'),
('foo', '', '/foo'),
('foo/', '', '/foo'),
('/foo', '', '/foo'),
('/foo/', '', '/foo'),
('/foo', '/', '/foo'),
('/foo/', '/', '/foo'),
('/foo', '/bar', '/foo'),
('/foo/', '/bar', '/foo'),
('foo', '/ns1/ns2', '/ns1/foo'),
('foo', '/ns1/ns2/', '/ns1/foo'),
('foo', '/ns1/ns2/ns3/', '/ns1/ns2/foo'),
('foo/', '/ns1/ns2', '/ns1/foo'),
('/foo', '/ns1/ns2', '/foo'),
('foo/bar', '/ns1/ns2', '/ns1/foo/bar'),
('foo//bar', '/ns1/ns2', '/ns1/foo/bar'),
('foo/bar', '/ns1/ns2/ns3', '/ns1/ns2/foo/bar'),
('foo//bar//', '/ns1/ns2/ns3', '/ns1/ns2/foo/bar'),
('~foo', '/', '/foo'),
('~foo', '/node', '/node/foo'),
('~foo', '/ns1/ns2', '/ns1/ns2/foo'),
('~foo/', '/ns1/ns2', '/ns1/ns2/foo'),
('~foo/bar', '/ns1/ns2', '/ns1/ns2/foo/bar'),
# #3044
('~/foo', '/', '/foo'),
('~/foo', '/node', '/node/foo'),
('~/foo', '/ns1/ns2', '/ns1/ns2/foo'),
('~/foo/', '/ns1/ns2', '/ns1/ns2/foo'),
('~/foo/bar', '/ns1/ns2', '/ns1/ns2/foo/bar'),
]
for name, node_name, v in tests:
assert v == resolve_name(name, node_name)
def test_anonymous_name():
from rosgraph.names import anonymous_name, is_legal_name
val = anonymous_name('foo')
assert 'foo' in val
assert 'foo' != val
assert val != anonymous_name('foo')
assert not '/' in val
assert is_legal_name(val)
def test_script_resolve_name():
from rosgraph.names import script_resolve_name, get_ros_namespace, ns_join
assert '/global' == script_resolve_name('/myscript', '/global')
val = script_resolve_name('/myscript', '')
assert get_ros_namespace() == val, val
val = script_resolve_name('/myscript', 'foo')
assert ns_join(get_ros_namespace(), 'foo') == val, val
assert '/myscript/private' == script_resolve_name('/myscript', '~private')
def test_canonicalize_name():
from rosgraph.names import canonicalize_name
tests = [
('', ''),
('/', '/'),
('foo', 'foo'),
('/foo', '/foo'),
('/foo/', '/foo'),
('/foo/bar', '/foo/bar'),
('/foo/bar/', '/foo/bar'),
('/foo/bar//', '/foo/bar'),
('/foo//bar', '/foo/bar'),
('//foo/bar', '/foo/bar'),
('foo/bar', 'foo/bar'),
('foo//bar', 'foo/bar'),
('foo/bar/', 'foo/bar'),
('/foo/bar', '/foo/bar'),
]
for t, v in tests:
assert v == canonicalize_name(t)
| bsd-3-clause | -3,297,891,609,819,803,000 | 34.917431 | 119 | 0.536569 | false |
airanmehr/bio | Scripts/Plasmodium/Data.py | 1 | 7725 | '''
Copyleft Oct 10, 2015 Arya Iranmehr, PhD Student, Bafna's Lab, UC San Diego, Email: [email protected]
'''
import numpy as np
import pandas as pd
import os,sys;home=os.path.expanduser('~') +'/'
class Data:
@staticmethod
def read(param):
"""
Data is sorted first by #CHROM and then by POS in addGlobalPos; it is
important to keep them sorted together.
"""
try:
meta=pd.read_pickle(param['dspath']+param['dsname']+'.meta.df')
snp=pd.read_pickle(param['dspath']+param['dsname']+'.snp.df')
except:
if param['Region']=='Peru' and param['dsname']=='all':
meta= Data.readPeruAll()
elif param['Region']=='Peru' and param['dsname']=='winzeler':
meta= Data.readPeruFiltered()
elif param['Region']=='Sudan':
meta= Data.readSudan()
else:
print >> sys.stderr, 'Bad Parameter: ',param
exit()
meta= Data.removeNonPolymorphicandTriAllele(meta, param)
meta = Data.correctCall(meta, param)
meta= Data.computeRC(meta, param)
meta.ix[:,'hetero']= meta[param['names']].apply(lambda x: ((x=='0/1')|(x=='1/0')).sum(),axis=1)
meta=pd.concat([meta, meta[param['names']].apply(lambda x: x.value_counts(),axis=1).fillna(0)],axis=1)
meta['0/1']+=meta['1/0'];meta.drop(['1/0'],axis=1,inplace=True)
calls=meta[param['names']]
snp=pd.concat([pd.DataFrame(calls.applymap(lambda x: x.split('/')[0]).values, columns=calls.columns+'maj') , pd.DataFrame(calls.applymap(lambda x: x.split('/')[1]).values, columns=calls.columns+'min')],axis=1).astype(int).T.sort_index();snp.columns=calls.index.values #major is always zero in heterozygotes in the other getsnp function 1/0 is possible for example line 7 mdio08 in the xlsx
from Scripts.Plasmodium.Run import runHW
meta=runHW(param,meta)
meta.to_pickle(param['dspath']+param['dsname']+'.meta.df')
snp.to_pickle(param['dspath']+param['dsname']+'.snp.df')
return snp,meta
@staticmethod
def computeRC(meta,param):
meta.ix[:,map(lambda x: x[-2:]=='rc' ,meta.columns.values)]=meta.ix[:,map(lambda x: x[-2:]=='rc' ,meta.columns.values)].astype(int)
meta.ix[:,'totrc']=0
for n in param['names']:
meta.ix[:,n+'rc']=meta[n+'majrc']+meta[n+'minrc']
meta['totrc']+=meta[n+'majrc']+meta[n+'minrc']
return meta
@staticmethod
def correctCall(meta,param):
names=[]
for x in param['names']:
names+= [x +'maj']
names+= [x +'min']
b=meta[names].apply(lambda x: x!=meta.REF,axis=0).astype(int)
c=pd.DataFrame(b.apply(lambda c: map(lambda x: '{}/{}'.format(x[0],x[1]), zip(c[np.arange(0,12,2)].values,c[np.arange(1,12,2)].values)) +list('000000'),axis=1).icol(range(6)))
c.columns=param['names']
meta.loc[:,param['names']]=c
return meta
@staticmethod
def removeNonPolymorphicandTriAllele(meta,param):
geno=meta[param['names']].apply(lambda x: x.value_counts(), axis=1)
geno.fillna(0,inplace=True)
biallele=param['biallele']
print 'Total sites: {}'.format(meta.shape[0])
print 'Discarding {} tri-allelic sites...'.format(sum(geno[biallele].sum(1)!=6))
print 'Discarding {} non-polymorphic sites...'.format(sum((geno['0/0']==6) | (geno['1/1']==6)))
idx= (geno[biallele].sum(1)==6) &(geno[biallele]['0/0']!=6) & (geno[biallele]['1/1']!=6)
return meta[idx]
@staticmethod
def getName(dsname,Peru):
return ('Sudan', ('PeruFiltered','PeruAll')[not dsname])[Peru]
# return ('Sudan', ('PeruFiltered','PeruAll')[not dsname]
@staticmethod
def readSudan():
df = Data.read_xl( home+ 'datasets/vineet/additionalmergedPvivax.xlsx')
df=Data.addGlobalPos(df)
meta=df.icol(range(9))
# geno=df.icol(range(9,12))
# SNP=Data.getSNP(geno, meta.REF)
return meta
@staticmethod
def getSNP(calls,ref):
SNP=np.zeros(calls.shape,dtype=int)
for j in range(calls.shape[1]):
SNP[:,j]= (ref!=calls.icol(j)).astype(int)
return pd.DataFrame(SNP.T,calls.columns,columns=calls.index).sort_index()
@staticmethod
def readPeruAll():
path=home+ 'datasets/popgen/mdio_annotated_passcalled_filteredwindow_majmin_nocontigs_32K.df'
try:
df=pd.read_pickle(path)
except:
df=Data.read_xl(path.replace('.df','.xlsx'))
df=Data.addGlobalPos(df)
df.to_pickle(path)
return df
@staticmethod
def correctWeinzlerFilterDataColumnNames(df):
names=map(unicode.lower,df.loc[0].dropna().values)
df.dropna(inplace=True)
df.columns=df.iloc[0].values.copy()
df=df.iloc[1:]
df.rename(columns={'substitution AA change':'AAeff', 'Alt allele':'ALT','quality':'QUAL', 'gene id':'GENE', 'Chromosome': '#CHROM', 'position': 'POS', 'Ref allele':'REF', 'substitution effect':'TYPE','minor allele read count': 'minrc', 'maj allele read count': 'majrc','min allele': 'min', 'maj allele': 'maj'}, inplace=True)
i=8
for n in names:
for _ in range( 5):
if df.columns.values[i]=='genotype':
df.columns.values[i]= n
else:
df.columns.values[i]= n+df.columns.values[i]
i+=1
return df
@staticmethod
def readPeruFiltered():
path=home+ 'datasets/popgen/SuppTableS2_SNVs.df'
try:
df = pd.read_pickle(path)
except:
df=Data.read_xl(path.replace('.df','.xlsx'))
df=Data.correctWeinzlerFilterDataColumnNames(df)
df=Data.addGlobalPos(df)
df.to_pickle(path)
# calls=pd.concat([df.icol(range(12,df.shape[1],5)), df.icol(range(13,df.shape[1],5))] , axis=1)
# SNP=Data.getSNP(calls, meta.REF)
return df
@staticmethod
def getChromOffset(results):
try:
chromOffset=results.groupby('#CHROM').end.agg('max').sort_index()
except AttributeError:
chromOffset=results.groupby('#CHROM').POS.agg('max').sort_index()
chromOffset.iloc[:]=chromOffset.values.cumsum()
chromOffset.loc[chromOffset.index.max()+1] =0
chromOffset.iloc[1:]=chromOffset.iloc[:-1].values
chromOffset.iloc[0]=0
return chromOffset
@staticmethod
def addGlobalPos(df):
Lens=df.groupby('#CHROM').POS.agg('max').values
chromOffset=np.append([1],Lens.cumsum())
df.insert(0, 'ChromOffset', chromOffset[(df['#CHROM'].values-1).astype(int)] )
df.insert(0, 'POSGlobal', chromOffset[(df['#CHROM'].values-1).astype(int)] +df.POS)
df.insert(0, 'CHROMLen', Lens[(df['#CHROM'].values-1).astype(int)] )
df.sort(['#CHROM', 'POS'],inplace=True) # Important
df.index=range(df.shape[0])
return df
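# Worked sketch of the offset arithmetic above, with hypothetical lengths:
# per-chromosome max POS [100, 50] gives chromOffset = [1, 100, 150], so a
# SNP at (#CHROM=2, POS=10) gets POSGlobal = 100 + 10 = 110.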
@staticmethod
def read_xl(path):
xl_file = pd.ExcelFile(path)
return [ xl_file.parse(sheet_name) for sheet_name in xl_file.sheet_names][0]
@staticmethod
def getSNPfromString(x):
x=map(str.strip,x.split('\n'))
snp=[]
for l in x:
if len(l):
snp.append([])
for i in l:
snp[-1].append(int(i))
snp = np.array(snp)
return snp
@staticmethod
def getSNPfromDataframe(df):
return df[(df=='2').sum(axis=1)==0].transpose().values.astype(int)
| mit | 6,538,085,675,037,243,000 | 41.445055 | 401 | 0.574369 | false |
franklinsales/udacity-data-analyst-nanodegree | project3/class-works/data-wrangling/data-extract-fundamentals/set_problem_2013_ERCOT_Hourly_Load_Data_corrected.py | 1 | 3217 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 15 18:05:20 2017
@author: franklin
"""
'''
Find the time and value of max load for each of the regions
COAST, EAST, FAR_WEST, NORTH, NORTH_C, SOUTHERN, SOUTH_C, WEST
and write the result out in a csv file, using pipe character | as the delimiter.
An example output can be seen in the "example.csv" file.
'''
import xlrd
import os
import csv
from zipfile import ZipFile
datafile = "data/2013_ERCOT_Hourly_Load_Data.xls"
outfile = "data/2013_Max_Loads.csv"
def open_zip(datafile):
with ZipFile('{0}.zip'.format(datafile), 'r') as myzip:
myzip.extractall()
def parse_file(datafile):
workbook = xlrd.open_workbook(datafile)
sheet = workbook.sheet_by_index(0)
data = {}
# process all columns that contain station data (one column per region)
for n in range(1, 9):
station = sheet.cell_value(0, n)
cv = sheet.col_values(n, start_rowx=1, end_rowx=None)
maxval = max(cv)
maxpos = cv.index(maxval) + 1
maxtime = sheet.cell_value(maxpos, 0)
realtime = xlrd.xldate_as_tuple(maxtime, 0)
data[station] = {"maxval": maxval,
"maxtime": realtime}
print data
return data
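# The returned mapping looks like this (values are hypothetical):
#   {'COAST': {'maxval': 18779.0,
#              'maxtime': (2013, 8, 7, 17, 0, 0)}, ...}
# where maxtime is the (year, month, day, hour, minute, second) tuple
# returned by xlrd.xldate_as_tuple.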
def save_file(data, filename):
with open(filename, "w") as f:
w = csv.writer(f, delimiter='|')
w.writerow(["Station", "Year", "Month", "Day", "Hour", "Max Load"])
for s in data:
year, month, day, hour, _ , _= data[s]["maxtime"]
w.writerow([s, year, month, day, hour, data[s]["maxval"]])
def test():
#open_zip(datafile)
data = parse_file(datafile)
save_file(data, outfile)
number_of_rows = 0
stations = []
ans = {'FAR_WEST': {'Max Load': '2281.2722140000024',
'Year': '2013',
'Month': '6',
'Day': '26',
'Hour': '17'}}
correct_stations = ['COAST', 'EAST', 'FAR_WEST', 'NORTH',
'NORTH_C', 'SOUTHERN', 'SOUTH_C', 'WEST']
fields = ['Year', 'Month', 'Day', 'Hour', 'Max Load']
with open(outfile) as of:
csvfile = csv.DictReader(of, delimiter="|")
for line in csvfile:
print line
station = line['Station']
if station == 'FAR_WEST':
for field in fields:
# Check if 'Max Load' is within .1 of answer
if field == 'Max Load':
max_answer = round(float(ans[station][field]), 1)
max_line = round(float(line[field]), 1)
assert max_answer == max_line
# Otherwise check for equality
else:
assert ans[station][field] == line[field]
number_of_rows += 1
stations.append(station)
# Output should be 8 lines not including header
assert number_of_rows == 8
# Check Station Names
assert set(stations) == set(correct_stations)
if __name__ == "__main__":
test()
| mit | 2,643,257,169,679,058,000 | 27.723214 | 80 | 0.539322 | false |
kivy/pyjnius | tests/test_collections.py | 1 | 1439 | from __future__ import absolute_import
import unittest
from jnius import autoclass, protocol_map
class TestCollections(unittest.TestCase):
def test_hashset(self):
hset = autoclass('java.util.HashSet')()
data = {1,2}
# add is in both Python and Java
for k in data:
hset.add(k)
# __len__
print(dir(hset))
self.assertEqual(len(data), len(hset))
# __contains__
for k in data:
self.assertTrue(k in hset)
self.assertFalse(0 in hset)
# __iter__
for k in hset:
self.assertTrue(k in data)
# __delitem__
for k in data:
del(hset[k])
self.assertFalse(k in hset)
def test_hashmap(self):
hmap = autoclass('java.util.HashMap')()
data = {1 : 'hello', 2 : 'world'}
# __setitem__
for k,v in data.items():
hmap[k] = v
# __len__
self.assertEqual(len(data), len(hmap))
# __contains__
for k,v in data.items():
self.assertTrue(k in hmap)
self.assertEqual(data[k], hmap[k])
# __iter__
for k in hmap:
self.assertTrue(k in data)
# __contains__
self.assertFalse(0 in hmap)
# __delitem__
for k in data:
del(hmap[k])
self.assertFalse(k in hmap)
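# Minimal sketch of the same protocol mapping outside unittest (assumes a
# JVM reachable through jnius; java.util.ArrayList is assumed to pick up
# __len__/__contains__ the same way HashSet and HashMap do above):
#   ArrayList = autoclass('java.util.ArrayList')
#   lst = ArrayList()
#   lst.add('hello')
#   assert 'hello' in lst and len(lst) == 1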
if __name__ == '__main__':
unittest.main()
| mit | 6,931,873,810,327,857,000 | 26.673077 | 47 | 0.502432 | false |
Azure/azure-sdk-for-python | sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/aio/operations/_backup_protected_items_crr_operations.py | 1 | 6308 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BackupProtectedItemsCrrOperations:
"""BackupProtectedItemsCrrOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservicesbackup.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
vault_name: str,
resource_group_name: str,
filter: Optional[str] = None,
skip_token: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ProtectedItemResourceList"]:
"""Provides a pageable list of all items that are backed up within a vault.
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param filter: OData filter options.
:type filter: str
:param skip_token: skipToken Filter.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProtectedItemResourceList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.recoveryservicesbackup.models.ProtectedItemResourceList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ProtectedItemResourceList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-20"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if skip_token is not None:
query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ProtectedItemResourceList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.NewErrorResponseAutoGenerated, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupProtectedItems/'} # type: ignore
| mit | 1,538,860,436,925,158,100 | 48.28125 | 191 | 0.64331 | false |
fyookball/electrum | plugins/shuffle_deprecated/crypto.py | 1 | 3181 | import ecdsa
from ecdsa.util import number_to_string, string_to_number
from electroncash.bitcoin import (generator_secp256k1, point_to_ser, EC_KEY,
Hash, InvalidPassword)
class CryptoError(Exception):
''' base class of a subset of the possible exceptions raised in this class
Subclasses have 4 items in their .args, see below '''
pass
class DecryptError(CryptoError):
''' always has 4 .args:
args[0] = programmer string message explaining what was caught
    args[1] = the wrapped exception generated by bitcoin.py (may be InvalidPassword or Exception)
    args[2] = the private key used for decryption
    args[3] = the message that failed to decrypt '''
pass
class EncryptError(CryptoError):
''' always has 4 .args:
args[0] = programmer string message explaining what was caught
    args[1] = the wrapped exception generated by bitcoin.py (may be InvalidPassword or Exception)
    args[2] = the public key used for encryption
    args[3] = the message that failed to encrypt '''
pass
class Crypto:
""" Functions related to cryptography """
def __init__(self):
self.G = generator_secp256k1
self._r = self.G.order()
self.private_key, self.eck, self.public_key = None, None, None
def generate_key_pair(self):
""" generate encryption/decryption pair """
self.private_key = ecdsa.util.randrange( self._r )
self.eck = EC_KEY(number_to_string(self.private_key, self._r))
self.public_key = point_to_ser(self.private_key*self.G, True)
def export_private_key(self):
""" Export private key as hex string """
if self.private_key:
return bytes.hex(number_to_string(self.private_key, self._r))
else:
return None
def restore_from_privkey(self, secret_string):
"restore key pair from private key expressed in a hex form"
self.private_key = string_to_number(bytes.fromhex(secret_string))
self.eck = EC_KEY(bytes.fromhex(secret_string))
self.public_key = point_to_ser(self.private_key*self.G, True)
def export_public_key(self):
""" serialization of public key """
return bytes.hex(self.public_key)
def encrypt(self, message, pubkey):
""" encrypt message with pubkey """
try:
res = self.eck.encrypt_message(message.encode('utf-8'), bytes.fromhex(pubkey))
return res.decode('utf-8')
except Exception as e: # grrr.. bitcoin.py raises 'Exception' :/
raise EncryptError("Bitcoin.py raised '{}' during Crypto.encrypt".format(type(e).__name__), e, pubkey, message) from e
def decrypt(self, message):
""" decrypt message """
try:
return self.eck.decrypt_message(message)
except (InvalidPassword, Exception) as e:
raise DecryptError("Bitcoin.py raised '{}' during Crypto.decrypt".format(type(e).__name__), e, self.private_key, message) from e
@staticmethod
def hash(text):
''' Returns sha256(sha256(text)) as bytes. text may be bytes or str. '''
return Hash(text) # bitcoin.Hash is sha256(sha256(x))
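# Usage sketch (illustrative, not part of the original module):
#   c = Crypto()
#   c.generate_key_pair()
#   ciphertext = c.encrypt('hello', c.export_public_key())
#   plaintext = c.decrypt(ciphertext)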
| mit | 2,820,235,096,471,262,700 | 39.782051 | 140 | 0.647281 | false |
hombit/scientific_python | scientific_python/e_testing/unittests.py | 1 | 1042 | #!/usr/bin/env python3
from __future__ import print_function, division, unicode_literals
import unittest
def is_even(n):
if int(n) != n:
raise ValueError('Argument should be integer')
return n % 2 == 0
class IsEvenTestCase(unittest.TestCase):
def test_even(self):
self.assertTrue(is_even(10), '10 is even')
def test_odd(self):
self.assertFalse(is_even(7), '7 is odd')
def test_float(self):
with self.assertRaises(ValueError,
msg='float number should lead to exception'):
is_even(3.14)
def test_non_integer(self):
with self.assertRaises((ValueError, TypeError),
msg='argument should be a number'):
is_even([2, 3])
# This function can be used in `setup.py` as `test_suite` keyword argument
def test_suite():
suite = unittest.defaultTestLoader.loadTestsFromName(__name__)
return suite
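# Example (sketch) of wiring the suite into setup.py:
#   setup(..., test_suite='scientific_python.e_testing.unittests.test_suite')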
if __name__ == '__main__': # checks if this file executed as script
unittest.main()
| mit | 4,173,808,645,998,390,000 | 25.717949 | 76 | 0.610365 | false |
goett/TRPS | Tools/CrystalCalculator/slownessZnSe.py | 1 | 2953 | import math
import numpy as np
import continuum as dyn
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
from matplotlib import cm
#plt.rcParams['image.cmap'] = 'viridis';
numt = 161;
nump = 321;
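# Sample directions over the unit sphere: polar angle t in [0, pi],
# azimuthal angle p in [0, 2*pi]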
t = np.linspace(0,math.pi,numt)
p = np.linspace(0,math.pi*2.0,nump)
print(t)
print(p)
pm,tm = np.meshgrid(p,t);
#vectors holding results for phase velocity
L = np.zeros(numt*nump);
FT = np.zeros(numt*nump);
ST = np.zeros(numt*nump);
rho = dyn.density('ZnSe')
C = dyn.elasticity('ZnSe')
# Flat index into the result vectors while sweeping over (theta, phi)
i = 0
for tt in t:
for pp in p:
Vp,pol = dyn.CalcPhaseVelocityPol(tt,pp,C,rho);
print(Vp)
L[i] = Vp[0];
FT[i] = Vp[1];
ST[i] = Vp[2];
i+=1
#Save resulting data
np.savetxt('ZnSeslowL.dat',L);
np.savetxt('ZnSeslowFT.dat',FT);
np.savetxt('ZnSeslowST.dat',ST);
Lm = L.reshape(numt,nump);
Lmc = Lm/np.amax(L);
FTm = FT.reshape(numt,nump);
FTmc = FTm/np.amax(FT);
STm = ST.reshape(numt,nump);
STmc = STm/np.amax(ST);
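# Slowness surfaces: each (theta, phi) direction is mapped to a radius of
# 1/v for the longitudinal (L), fast transverse (FT) and slow transverse (ST) modes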
xl=np.sin(tm)*np.cos(pm)/Lm;
yl=np.sin(tm)*np.sin(pm)/Lm;
zl=np.cos(tm)/Lm;
xft=np.sin(tm)*np.cos(pm)/FTm;
yft=np.sin(tm)*np.sin(pm)/FTm;
zft=np.cos(tm)/FTm;
xst=np.sin(tm)*np.cos(pm)/STm;
yst=np.sin(tm)*np.sin(pm)/STm;
zst=np.cos(tm)/STm;
fig1 = plt.figure(figsize=(8.5,8.5));
#f,(ax, bx, cx) = plt.subplots(1,3);
ax = fig1.add_subplot(111,projection='3d');
#ax.plot_surface(xl,yl,zl,facecolors=Lmc,cmap='PuBuGn');
ax.plot_wireframe(xl,yl,zl,color='k',alpha=0.3);
cset = ax.contour(xl, yl, zl, zdir='z', offset=1.2*np.amin(zl), cmap=cm.coolwarm)
cset = ax.contour(xl, yl, zl, zdir='y', offset=1.2*np.amax(yl), cmap=cm.coolwarm)
cset = ax.contour(xl, yl, zl, zdir='x', offset=1.2*np.amin(xl), cmap=cm.coolwarm)
ax.set_xlim((1.2*np.amin(xl),1.2*np.amax(xl)));
ax.set_ylim((1.2*np.amin(yl),1.2*np.amax(yl)));
ax.set_zlim((1.2*np.amin(zl),1.2*np.amax(zl)));
fig2 = plt.figure(figsize=(8.5,8.5));
bx = fig2.add_subplot(111,projection='3d');
bx.plot_wireframe(xft,yft,zft,color='k',alpha=0.3);
cset = bx.contour(xft, yft, zft, zdir='z', offset=1.2*np.amin(zft), cmap=cm.coolwarm)
cset = bx.contour(xft, yft, zft, zdir='y', offset=1.2*np.amax(yft), cmap=cm.coolwarm)
cset = bx.contour(xft, yft, zft, zdir='x', offset=1.2*np.amin(xft), cmap=cm.coolwarm)
bx.set_xlim((1.2*np.amin(xft),1.2*np.amax(xft)));
bx.set_ylim((1.2*np.amin(yft),1.2*np.amax(yft)));
bx.set_zlim((1.2*np.amin(zft),1.2*np.amax(zft)));
fig3 = plt.figure(figsize=(8.5,8.5));
cx = fig3.add_subplot(111,projection='3d');
cx.plot_wireframe(xst,yst,zst,color='k',alpha=0.3);
cset = cx.contour(xst, yst, zst, zdir='z', offset=1.2*np.amin(zst), cmap=cm.coolwarm)
cset = cx.contour(xst, yst, zst, zdir='y', offset=1.2*np.amax(yst), cmap=cm.coolwarm)
cset = cx.contour(xst, yst, zst, zdir='x', offset=1.2*np.amin(xst), cmap=cm.coolwarm)
cx.set_xlim((1.2*np.amin(xst),1.2*np.amax(xst)));
cx.set_ylim((1.2*np.amin(yst),1.2*np.amax(yst)));
cx.set_zlim((1.2*np.amin(zst),1.2*np.amax(zst)));
#ax.set_axis_off()
plt.show()
| mit | 339,710,894,652,596,600 | 29.132653 | 85 | 0.655943 | false |
Nikolas1814/HackTues | webServer/all/views.py | 1 | 2898 | from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.template.loader import get_template
from django.template import Context
from django.http import StreamingHttpResponse
from django.http import HttpResponseRedirect
import datetime
from django.db.models import Q
import os.path
from news.forms import ClassesForm
from news.models import classes
from django.contrib.auth.models import *
from register.models import userInformation
from models import *
from forms import *
@csrf_exempt
def all(request):
if not request.user.is_authenticated():
return HttpResponseRedirect('/index/login/')
if request.method == 'POST':
html = validateForm(request)
return html
else:
form = HomeworkForm()
signals = homeworks.objects.filter().order_by("-id")
userInfo = userInformation.objects.filter(userID = request.user.id)
userName = request.user.first_name
t = get_template('all.html')
html = t.render(Context({'profilePicturePath' : userInfo[0].userProfilePicturePath, 'username' : userName, 'form' : form, 'signals' : signals}))
return StreamingHttpResponse(html)
def validateForm(request):
form = HomeworkForm(request.POST)
signals = homeworks.objects.filter().order_by('-modified')
if form.is_valid():
data = form.cleaned_data
homework = homeworks(date = data['date'], classesId = data['classHomework'], day = data['day'], petitionDescription = data['petitionDescription'],
homework = data['homework'], username = request.user.username, modified = datetime.datetime.now())
homework.save()
return HttpResponseRedirect('/index/all/')
	userInfo = userInformation.objects.filter(userID = request.user.id)
	userName = request.user.first_name
	# Re-render the page with validation errors (mirrors the context built in all())
	t = get_template('all.html')
	html = t.render(Context({'profilePicturePath' : userInfo[0].userProfilePicturePath, 'username' : userName, 'form' : form, 'signals' : signals}))
	return StreamingHttpResponse(html)
#def validateForm(request):
# form = ProgramForm(request.POST)
# if form.is_valid():
# data = form.cleaned_data
# pr = program(first = data['first'], second = data['second'], third = data['third'], fourth = data['fourth'],
# five = data['five'], sixth = data['sixth'], seven = data['seven'], eight = data['eight'], nine = data['nine'],
# ten = data['ten'],firstTime = data['firstTime'], secondTime = data['secondTime'], thirdTime = data['thirdTime'],
# fourthTime = data['fourthTime'],fiveTime = data['fiveTime'], sixthTime = data['sixthTime'], sevenTime = data['sevenTime'],
# eightTime = data['eightTime'], nineTime = data['nineTime'], tenTime = data['tenTime'], date = data['date'], classesId = data['grade'])
# pr.save()
# return HttpResponseRedirect('/index/all/')
# userInfo = userInformation.objects.filter(userID = request.user.id)
# userName = request.user.first_name
# t = get_template('all.html')
# html = t.render(Context({'profilePicturePath' : userInfo[0].userProfilePicturePath, 'username' : userName, 'form' : form}))
# return StreamingHttpResponse(html) | mit | -6,916,202,942,270,189,000 | 45.015873 | 149 | 0.730504 | false |
olatoft/reverse-hangman | Main.py | 1 | 2693 | import Words
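# Interactive "reverse hangman": the program narrows down the user's word by
# asking about letters. User-facing prompts below are in Norwegian (nynorsk).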
def get_word_length():
word_length = 0
while word_length == 0:
try:
word_length = int(input('Kor mange bokstavar er det i ordet?\n'))
except:
print('Du må skrive inn eit tal. Prøv igjen.\n')
return word_length
def get_if_letter_in_word(letter):
answer = ''
while answer == '':
answer = input('\nInneheldt ordet bokstaven ' + letter +
'? Ja eller nei?\n')
if (answer == 'Ja') or (answer == 'ja'):
return True
elif (answer == 'Nei') or (answer == 'nei'):
return False
else:
answer = ''
print('Du må skrive enten ja eller nei\n')
def get_letter_pos_list(letter, words):
letter_pos_list = ''
while letter_pos_list == '':
try:
while letter_pos_list == '':
letter_pos_list = input(
'Skriv inn nummer på posisjonar i ordet der bokstaven ' +
letter + ' er med:\n').split()
if len(letter_pos_list) == 0:
letter_pos_list = ''
print('Du må skrive inn minst 1 tal. Prøv igjen\n')
for i in range(len(letter_pos_list)):
letter_pos_list[i] = int(letter_pos_list[i]) - 1
if (min(letter_pos_list) < 0) or (
(max(letter_pos_list) + 1) > words.word_length):
letter_pos_list = ''
print('Tal må vere større enn null og mindre enn ordlengde.\n')
except:
letter_pos_list = ''
print('Du må skrive inn tal. Prøv igjen.\n')
return letter_pos_list
def loop(words):
while True:
letter = words.get_letter_to_guess()
answer = get_if_letter_in_word(letter)
if answer:
letter_pos_list = get_letter_pos_list(letter, words)
for element in letter_pos_list:
words.set_words_with_letter_in_pos(letter, element)
for i in range(words.word_length):
if i not in letter_pos_list:
words.set_words_without_letter_in_pos(letter, i)
else:
words.set_words_without_letter(letter)
if len(words.get_words()) == 1:
print('\nOrdet er ' + words.get_words()[0])
break
elif len(words.get_words()) == 0:
print('\nOrdet er ikkje i ordboka')
break
print(words.get_words())
def main():
words = Words.Words()
words.word_length = get_word_length()
words.set_words_with_length(words.word_length)
loop(words)
if __name__ == '__main__':
main()
| gpl-3.0 | 4,939,119,348,872,317,000 | 32.123457 | 83 | 0.513231 | false |
cheral/orange3 | Orange/canvas/gui/tests/test_toolbox.py | 6 | 1419 | """
Tests for ToolBox widget.
"""
from .. import test
from .. import toolbox
from AnyQt.QtWidgets import QLabel, QListView, QSpinBox, QAbstractButton
from AnyQt.QtGui import QIcon
class TestToolBox(test.QAppTestCase):
def test_tool_box(self):
w = toolbox.ToolBox()
style = self.app.style()
icon = QIcon(style.standardPixmap(style.SP_FileIcon))
p1 = QLabel("A Label")
p2 = QListView()
p3 = QLabel("Another\nlabel")
p4 = QSpinBox()
i1 = w.addItem(p1, "T1", icon)
i2 = w.addItem(p2, "Tab " * 10, icon, "a tab")
i3 = w.addItem(p3, "t3")
i4 = w.addItem(p4, "t4")
self.assertSequenceEqual([i1, i2, i3, i4], range(4))
self.assertEqual(w.count(), 4)
for i, item in enumerate([p1, p2, p3, p4]):
self.assertIs(item, w.widget(i))
b = w.tabButton(i)
a = w.tabAction(i)
self.assertIsInstance(b, QAbstractButton)
self.assertIs(b.defaultAction(), a)
w.show()
w.removeItem(2)
self.assertEqual(w.count(), 3)
self.assertIs(w.widget(2), p4)
p3 = QLabel("Once More Unto the Breach")
w.insertItem(2, p3, "Dear friend")
self.assertEqual(w.count(), 4)
self.assertIs(w.widget(1), p2)
self.assertIs(w.widget(2), p3)
self.assertIs(w.widget(3), p4)
self.app.exec_()
| bsd-2-clause | 4,222,601,053,344,378,400 | 25.277778 | 72 | 0.564482 | false |
CeON/avroknife | avroknife/test/command_line_runner.py | 1 | 8137 | # Copyright 2013-2015 University of Warsaw
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import subprocess
from avroknife.test.tools import Tools
from avroknife.test.file_system import HDFS, LocalFS
class RunResult:
def __init__(self, stdout, input_dir, output_dir):
self.__stdout = stdout
self.__input_dir = input_dir
self.__output_dir = output_dir
def get_stdout(self):
"""Get produced stdout string"""
return self.__stdout
def get_input_path(self, name):
"""Get path to the input in the local file system"""
return os.path.join(self.__input_dir, name)
def get_output_path(self, name):
"""Get path to the output in the local file system"""
return os.path.join(self.__output_dir, name)
def __str__(self):
return 'stdout: {}\n\ninput_dir={}\noutput_dir={}'.format(
self.__stdout, self.__input_dir, self.__output_dir)
class CommandLineRunnerException(Exception):
def __init__(self, exception):
"""
Args:
exception: instance of subprocess.CalledProcessError
"""
self.returncode = exception.returncode
self.cmd = exception.cmd
self.output = exception.output
class CommandLineRunner:
__input_file_prefix = '@in:'
__output_file_prefix = '@out:'
__input_subdir = 'input'
__outputs_subdir = 'outputs'
__hdfs_path_prefix = ''
__local_path_prefix = 'local:'
def __init__(self, program_path, local_input_dir, enforce_local=False):
"""
Args:
program_path: path to the command line program to be executed
local_input_dir: path to the directory containing input files
that can be referenced through placeholders
enforce_local: allow running the program only on local file system.
HDFS is not accessed in this mode.
"""
self.__program_path = program_path
self.__enforce_local = enforce_local
self.__local_fs = LocalFS()
self.__local_tmp_dir = \
self.__initialize_tmp_dirs(self.__local_fs, local_input_dir)
if not self.__enforce_local:
self.__hdfs = HDFS()
self.__hdfs_tmp_dir = \
self.__initialize_tmp_dirs(self.__hdfs, local_input_dir)
self.__is_closed = False
@staticmethod
def __initialize_tmp_dirs(fs, local_input_dir):
dir_ = fs.create_temporary_dir()
fs.copy_from_local_dir(local_input_dir,
fs.join_path([dir_, CommandLineRunner.__input_subdir]))
fs.create_dir(fs.join_path([dir_, CommandLineRunner.__outputs_subdir]))
return dir_
def run(self, args_string, is_input_local, is_output_local,
discard_stderr=False):
"""
        Execute the program, replacing placeholders in the arguments string.
Args:
args_string: parameters of the program with file placeholders
is_input_local: if True, the input placeholders will be replaced
with paths in local file system. If False, they will be replaced
with paths in HDFS.
is_output_local: if True, the output placeholders will be replaced
with path in local file system. If False, they will be replaced
with paths in HDFS.
            discard_stderr: if True, the standard error is discarded
Returns:
RunResult object
Raises:
CommandLineRunnerException: exception raised when executed process
returns non-zero exit status.
"""
if self.__is_closed:
raise Exception('This object has been already closed')
if self.__enforce_local:
if not (is_input_local and is_output_local):
raise Exception('is_input_local={}, is_output_local={}, while '\
'the enforce_local mode allows running the program only '\
'on local file system '.\
format(is_input_local, is_output_local))
local_out_dir = self.__local_fs.create_temporary_dir(
self.__local_fs.join_path([self.__local_tmp_dir, self.__outputs_subdir]))
hdfs_out_dir = None
if not is_output_local:
hdfs_out_dir = self.__hdfs.create_temporary_dir(
self.__hdfs.join_path([self.__hdfs_tmp_dir, self.__outputs_subdir]))
args_replaced = self.__replace_args(args_string,
is_input_local, is_output_local, local_out_dir, hdfs_out_dir)
stdout = self.run_raw(args_replaced, discard_stderr)
if not is_output_local:
## We need to delete this directory because the copying operation
## requires that the destination directory doesn't already exist
self.__local_fs.delete_dir(local_out_dir)
self.__hdfs.copy_to_local_dir(hdfs_out_dir, local_out_dir)
return RunResult(stdout,
os.path.join(self.__local_tmp_dir, self.__input_subdir),
local_out_dir)
def run_raw(self, args_string, discard_stderr=False):
"""
        Execute the program WITHOUT replacing placeholders in the arguments string.
Args:
args_string: parameters of the program
discard_stderr: if True, the standard error is discarded
Returns:
stdout string
"""
return self.__system(self.__program_path + ' ' + args_string,
discard_stderr)
def __replace_args(self, args_string, is_input_local, is_output_local,
local_out_dir, hdfs_out_dir):
text = args_string
if is_input_local:
text = self.__replace(text, self.__input_file_prefix,
self.__local_path_prefix, self.__local_fs.join_path,
[self.__local_tmp_dir, self.__input_subdir])
else:
text = self.__replace(text, self.__input_file_prefix,
self.__hdfs_path_prefix, self.__hdfs.join_path,
[self.__hdfs_tmp_dir, self.__input_subdir])
if is_output_local:
text = self.__replace(text, self.__output_file_prefix,
self.__local_path_prefix, self.__local_fs.join_path,
[local_out_dir])
else:
text = self.__replace(text, self.__output_file_prefix,
self.__hdfs_path_prefix, self.__hdfs.join_path,
[hdfs_out_dir])
return text
@staticmethod
def __replace(text, placeholder_prefix, path_prefix, path_joiner_function,
dir_name_elements):
"""Replace placeholders with paths to files"""
replaced = Tools.replace(text, placeholder_prefix,
lambda s: path_prefix + path_joiner_function(dir_name_elements + [s]))
return replaced
def close(self):
"""Do the cleanup"""
if self.__is_closed:
raise Exception('This object has been already closed')
self.__is_closed = True
self.__local_fs.delete_dir(self.__local_tmp_dir)
if not self.__enforce_local:
self.__hdfs.delete_dir(self.__hdfs_tmp_dir)
@staticmethod
def __system(command, discard_stderr):
try:
if discard_stderr:
with open(os.devnull, 'w') as devnull:
return subprocess.check_output(
command, shell=True, stderr=devnull)
else:
return subprocess.check_output(command, shell=True)
except subprocess.CalledProcessError as ex:
raise CommandLineRunnerException(ex)
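# Usage sketch (illustrative; the program path, input dir and file names are
# placeholders):
#   runner = CommandLineRunner('avroknife', '/path/to/inputs', enforce_local=True)
#   result = runner.run('tojson @in:sample', is_input_local=True, is_output_local=True)
#   print(result.get_stdout())
#   runner.close()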
| apache-2.0 | 9,083,836,292,020,347,000 | 38.5 | 85 | 0.598501 | false |
bostonlink/pamalt | pamalt/transforms/log_queries.py | 1 | 5494 | #!/usr/bin/env python
# Copyright (C) 2012 pamalt Developer.
# This file is part of pamalt - https://github.com/bostonlink/pamalt
# See the file 'LICENSE' for copying permission.
# PaloAlto Log query Maltego transforms module
# Author: David Bressler (@bostonlink)
import urllib, urllib2
import time, sys
import xml.etree.ElementTree as ET
from pamalt.lib import pamod
# Threat Log queries
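# Each transform below builds a PAN-OS log filter, polls the asynchronous
# query job until its status is FIN, then emits Maltego entity XML for the
# matching log entries.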
def ip_2_threat(pa_hostname, key, ip):
query = '(addr.dst in %s) or (addr.src in %s)' % (ip, ip)
jobid = pamod.pa_log_query(pa_hostname, key, 'threat', query)
time.sleep(5)
# Loop function to check if the log query job is done
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
while status.text == 'ACT':
time.sleep(5)
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
if status.text == 'FIN':
break
# parse the log data and create dictionaries stored in a list for each individual log
log_list = []
for entry in root.findall(".//log/logs/entry"):
entry_dic = {}
for data in entry:
entry_dic[data.tag] = data.text
log_list.append(entry_dic)
# Maltego XML Output
print "<MaltegoMessage>\n<MaltegoTransformResponseMessage>"
print " <Entities>"
threat_list = []
for dic in log_list:
if dic['threatid'] in threat_list:
continue
else:
print """ <Entity Type="pamalt.paThreat">
<Value>%s</Value>
<AdditionalFields>
<Field Name="ipsrc" DisplayName="IP Source">%s</Field>
<Field Name="ipdst" DisplayName="IP Destination">%s</Field>
<Field Name="tid" DisplayName="Threat ID">%s</Field>
</AdditionalFields>
</Entity>""" % (dic['threatid'], dic['src'], dic['dst'], dic['tid'])
threat_list.append(dic['threatid'])
print " </Entities>"
print "</MaltegoTransformResponseMessage>\n</MaltegoMessage>"
def threat_2_ipsrc(pa_hostname, key, tid):
query = '(threatid eq %s)' % (tid)
jobid = pamod.pa_log_query(pa_hostname, key, 'threat', query)
time.sleep(5)
# Loop function to check if the log query job is done
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
while status.text == 'ACT':
time.sleep(5)
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
if status.text == 'FIN':
break
# parse the log data and create dictionaries stored in a list for each individual log
log_list = []
for entry in root.findall(".//log/logs/entry"):
entry_dic = {}
for data in entry:
entry_dic[data.tag] = data.text
log_list.append(entry_dic)
# Maltego XML Output
print "<MaltegoMessage>\n<MaltegoTransformResponseMessage>"
print " <Entities>"
ip_list = []
for dic in log_list:
if dic['src'] in ip_list:
continue
else:
print """ <Entity Type="maltego.IPv4Address">
<Value>%s</Value>
<AdditionalFields>
<Field Name="ipdst" DisplayName="IP Destination">%s</Field>
<Field Name="tid" DisplayName="Threat ID">%s</Field>
</AdditionalFields>
</Entity>""" % (dic['src'], dic['dst'], dic['tid'])
ip_list.append(dic['src'])
print " </Entities>"
print "</MaltegoTransformResponseMessage>\n</MaltegoMessage>"
def threat_2_ipdst(pa_hostname, key, tid):
query = '(threatid eq %s)' % (tid)
jobid = pamod.pa_log_query(pa_hostname, key, 'threat', query)
time.sleep(5)
# Loop function to check if the log query job is done
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
while status.text == 'ACT':
time.sleep(5)
root = ET.fromstring(pamod.pa_log_get(pa_hostname, key, jobid))
for status in root.findall(".//job/status"):
if status.text == 'FIN':
break
# parse the log data and create dictionaries stored in a list for each individual log
log_list = []
for entry in root.findall(".//log/logs/entry"):
entry_dic = {}
for data in entry:
entry_dic[data.tag] = data.text
log_list.append(entry_dic)
# Maltego XML Output
print "<MaltegoMessage>\n<MaltegoTransformResponseMessage>"
print " <Entities>"
ip_list = []
for dic in log_list:
if dic['dst'] in ip_list:
continue
else:
print """ <Entity Type="maltego.IPv4Address">
<Value>%s</Value>
<AdditionalFields>
<Field Name="ipdst" DisplayName="IP Source">%s</Field>
<Field Name="tid" DisplayName="Threat ID">%s</Field>
</AdditionalFields>
</Entity>""" % (dic['dst'], dic['src'], dic['tid'])
ip_list.append(dic['dst'])
print " </Entities>"
print "</MaltegoTransformResponseMessage>\n</MaltegoMessage>" | gpl-3.0 | -3,957,049,368,385,212,400 | 34.681818 | 89 | 0.568074 | false |
jolyonb/edx-platform | cms/urls.py | 1 | 14472 | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib.admin import autodiscover as django_autodiscover
from django.utils.translation import ugettext_lazy as _
from rest_framework_swagger.views import get_swagger_view
import contentstore.views
from cms.djangoapps.contentstore.views.organization import OrganizationListView
import openedx.core.djangoapps.common_views.xblock
import openedx.core.djangoapps.debug.views
import openedx.core.djangoapps.lang_pref.views
from openedx.core.djangoapps.password_policy import compliance as password_policy_compliance
from openedx.core.djangoapps.password_policy.forms import PasswordPolicyAwareAdminAuthForm
from ratelimitbackend import admin
django_autodiscover()
admin.site.site_header = _('Studio Administration')
admin.site.site_title = admin.site.site_header
if password_policy_compliance.should_enforce_compliance_on_login():
admin.site.login_form = PasswordPolicyAwareAdminAuthForm
# Custom error pages
# These are used by Django to render these error codes. Do not remove.
# pylint: disable=invalid-name
handler404 = contentstore.views.render_404
handler500 = contentstore.views.render_500
# Pattern to match a course key or a library key
COURSELIKE_KEY_PATTERN = r'(?P<course_key_string>({}|{}))'.format(
r'[^/]+/[^/]+/[^/]+', r'[^/:]+:[^/+]+\+[^/+]+(\+[^/]+)?'
)
# Pattern to match a library key only
LIBRARY_KEY_PATTERN = r'(?P<library_key_string>library-v1:[^/+]+\+[^/+]+)'
urlpatterns = [
url(r'', include('openedx.core.djangoapps.user_authn.urls_common')),
url(r'', include('student.urls')),
url(r'^transcripts/upload$', contentstore.views.upload_transcripts, name='upload_transcripts'),
url(r'^transcripts/download$', contentstore.views.download_transcripts, name='download_transcripts'),
url(r'^transcripts/check$', contentstore.views.check_transcripts, name='check_transcripts'),
url(r'^transcripts/choose$', contentstore.views.choose_transcripts, name='choose_transcripts'),
url(r'^transcripts/replace$', contentstore.views.replace_transcripts, name='replace_transcripts'),
url(r'^transcripts/rename$', contentstore.views.rename_transcripts, name='rename_transcripts'),
url(r'^preview/xblock/(?P<usage_key_string>.*?)/handler/(?P<handler>[^/]*)(?:/(?P<suffix>.*))?$',
contentstore.views.preview_handler, name='preview_handler'),
url(r'^xblock/(?P<usage_key_string>.*?)/handler/(?P<handler>[^/]*)(?:/(?P<suffix>.*))?$',
contentstore.views.component_handler, name='component_handler'),
url(r'^xblock/resource/(?P<block_type>[^/]*)/(?P<uri>.*)$',
openedx.core.djangoapps.common_views.xblock.xblock_resource, name='xblock_resource_url'),
url(r'^not_found$', contentstore.views.not_found, name='not_found'),
url(r'^server_error$', contentstore.views.server_error, name='server_error'),
url(r'^organizations$', OrganizationListView.as_view(), name='organizations'),
# noop to squelch ajax errors
url(r'^event$', contentstore.views.event, name='event'),
url(r'^heartbeat', include('openedx.core.djangoapps.heartbeat.urls')),
url(r'^user_api/', include('openedx.core.djangoapps.user_api.legacy_urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
# User API endpoints
url(r'^api/user/', include('openedx.core.djangoapps.user_api.urls')),
# Update session view
url(r'^lang_pref/session_language',
openedx.core.djangoapps.lang_pref.views.update_session_language,
name='session_language'
),
# Darklang View to change the preview language (or dark language)
url(r'^update_lang/', include('openedx.core.djangoapps.dark_lang.urls', namespace='dark_lang')),
# For redirecting to help pages.
url(r'^help_token/', include('help_tokens.urls')),
url(r'^api/', include('cms.djangoapps.api.urls', namespace='api')),
# restful api
url(r'^$', contentstore.views.howitworks, name='homepage'),
url(r'^howitworks$', contentstore.views.howitworks, name='howitworks'),
url(r'^signup$', contentstore.views.signup, name='signup'),
url(r'^signin$', contentstore.views.login_page, name='login'),
url(r'^signin_redirect_to_lms$', contentstore.views.login_redirect_to_lms, name='login_redirect_to_lms'),
url(r'^request_course_creator$', contentstore.views.request_course_creator, name='request_course_creator'),
url(r'^course_team/{}(?:/(?P<email>.+))?$'.format(COURSELIKE_KEY_PATTERN),
contentstore.views.course_team_handler, name='course_team_handler'),
url(r'^course_info/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.course_info_handler,
name='course_info_handler'),
url(r'^course_info_update/{}/(?P<provided_id>\d+)?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.course_info_update_handler, name='course_info_update_handler'
),
url(r'^home/?$', contentstore.views.course_listing, name='home'),
url(r'^course/{}/search_reindex?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.course_search_index_handler,
name='course_search_index_handler'
),
url(r'^course/{}?$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.course_handler, name='course_handler'),
url(r'^checklists/{}?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.checklists_handler,
name='checklists_handler'),
url(r'^course_notifications/{}/(?P<action_state_id>\d+)?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.course_notifications_handler,
name='course_notifications_handler'),
url(r'^course_rerun/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.course_rerun_handler,
name='course_rerun_handler'),
url(r'^container/{}$'.format(settings.USAGE_KEY_PATTERN), contentstore.views.container_handler,
name='container_handler'),
url(r'^orphan/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.orphan_handler,
name='orphan_handler'),
url(r'^assets/{}/{}?$'.format(settings.COURSE_KEY_PATTERN, settings.ASSET_KEY_PATTERN),
contentstore.views.assets_handler,
name='assets_handler'),
url(r'^import/{}$'.format(COURSELIKE_KEY_PATTERN), contentstore.views.import_handler,
name='import_handler'),
url(r'^import_status/{}/(?P<filename>.+)$'.format(COURSELIKE_KEY_PATTERN),
contentstore.views.import_status_handler, name='import_status_handler'),
# rest api for course import/export
url(r'^api/courses/',
include('cms.djangoapps.contentstore.api.urls', namespace='courses_api')
),
url(r'^export/{}$'.format(COURSELIKE_KEY_PATTERN), contentstore.views.export_handler,
name='export_handler'),
url(r'^export_output/{}$'.format(COURSELIKE_KEY_PATTERN), contentstore.views.export_output_handler,
name='export_output_handler'),
url(r'^export_status/{}$'.format(COURSELIKE_KEY_PATTERN), contentstore.views.export_status_handler,
name='export_status_handler'),
url(r'^xblock/outline/{}$'.format(settings.USAGE_KEY_PATTERN), contentstore.views.xblock_outline_handler,
name='xblock_outline_handler'),
url(r'^xblock/container/{}$'.format(settings.USAGE_KEY_PATTERN), contentstore.views.xblock_container_handler,
name='xblock_container_handler'),
url(r'^xblock/{}/(?P<view_name>[^/]+)$'.format(settings.USAGE_KEY_PATTERN), contentstore.views.xblock_view_handler,
name='xblock_view_handler'),
url(r'^xblock/{}?$'.format(settings.USAGE_KEY_PATTERN), contentstore.views.xblock_handler,
name='xblock_handler'),
url(r'^tabs/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.tabs_handler,
name='tabs_handler'),
url(r'^settings/details/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.settings_handler,
name='settings_handler'),
url(r'^settings/grading/{}(/)?(?P<grader_index>\d+)?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.grading_handler, name='grading_handler'),
url(r'^settings/advanced/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.advanced_settings_handler,
name='advanced_settings_handler'),
url(r'^textbooks/{}$'.format(settings.COURSE_KEY_PATTERN), contentstore.views.textbooks_list_handler,
name='textbooks_list_handler'),
url(r'^textbooks/{}/(?P<textbook_id>\d[^/]*)$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.textbooks_detail_handler, name='textbooks_detail_handler'),
url(r'^videos/{}(?:/(?P<edx_video_id>[-\w]+))?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.videos_handler, name='videos_handler'),
url(r'^video_images/{}(?:/(?P<edx_video_id>[-\w]+))?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.video_images_handler, name='video_images_handler'),
url(r'^transcript_preferences/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.transcript_preferences_handler, name='transcript_preferences_handler'),
url(r'^transcript_credentials/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.transcript_credentials_handler, name='transcript_credentials_handler'),
url(r'^transcript_download/$', contentstore.views.transcript_download_handler, name='transcript_download_handler'),
url(r'^transcript_upload/$', contentstore.views.transcript_upload_handler, name='transcript_upload_handler'),
url(r'^transcript_delete/{}(?:/(?P<edx_video_id>[-\w]+))?(?:/(?P<language_code>[^/]*))?$'.format(
settings.COURSE_KEY_PATTERN
), contentstore.views.transcript_delete_handler, name='transcript_delete_handler'),
url(r'^video_encodings_download/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.video_encodings_download, name='video_encodings_download'),
url(r'^group_configurations/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.group_configurations_list_handler,
name='group_configurations_list_handler'),
url(r'^group_configurations/{}/(?P<group_configuration_id>\d+)(/)?(?P<group_id>\d+)?$'.format(
settings.COURSE_KEY_PATTERN), contentstore.views.group_configurations_detail_handler,
name='group_configurations_detail_handler'),
url(r'^api/val/v0/', include('edxval.urls')),
url(r'^api/tasks/v0/', include('user_tasks.urls')),
url(r'^accessibility$', contentstore.views.accessibility, name='accessibility'),
]
JS_INFO_DICT = {
'domain': 'djangojs',
# We need to explicitly include external Django apps that are not in LOCALE_PATHS.
'packages': ('openassessment',),
}
if settings.FEATURES.get('ENABLE_CONTENT_LIBRARIES'):
urlpatterns += [
url(r'^library/{}?$'.format(LIBRARY_KEY_PATTERN),
contentstore.views.library_handler, name='library_handler'),
url(r'^library/{}/team/$'.format(LIBRARY_KEY_PATTERN),
contentstore.views.manage_library_users, name='manage_library_users'),
]
if settings.FEATURES.get('ENABLE_EXPORT_GIT'):
urlpatterns += [
url(r'^export_git/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.export_git,
name='export_git')
]
if settings.FEATURES.get('ENABLE_SERVICE_STATUS'):
urlpatterns.append(url(r'^status/', include('openedx.core.djangoapps.service_status.urls')))
# The password pages in the admin tool are disabled so that all password
# changes go through our user portal and follow complexity requirements.
urlpatterns.append(url(r'^admin/password_change/$', handler404))
urlpatterns.append(url(r'^admin/auth/user/\d+/password/$', handler404))
urlpatterns.append(url(r'^admin/', include(admin.site.urls)))
# enable entrance exams
if settings.FEATURES.get('ENTRANCE_EXAMS'):
urlpatterns.append(url(r'^course/{}/entrance_exam/?$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.entrance_exam))
# Enable Web/HTML Certificates
if settings.FEATURES.get('CERTIFICATES_HTML_VIEW'):
from contentstore.views.certificates import (
certificate_activation_handler,
signatory_detail_handler,
certificates_detail_handler,
certificates_list_handler
)
urlpatterns += [
url(r'^certificates/activation/{}/'.format(settings.COURSE_KEY_PATTERN),
certificate_activation_handler,
name='certificate_activation_handler'),
url(r'^certificates/{}/(?P<certificate_id>\d+)/signatories/(?P<signatory_id>\d+)?$'.format(
settings.COURSE_KEY_PATTERN), signatory_detail_handler, name='signatory_detail_handler'),
url(r'^certificates/{}/(?P<certificate_id>\d+)?$'.format(settings.COURSE_KEY_PATTERN),
certificates_detail_handler, name='certificates_detail_handler'),
url(r'^certificates/{}$'.format(settings.COURSE_KEY_PATTERN),
certificates_list_handler, name='certificates_list_handler')
]
# Maintenance Dashboard
urlpatterns.append(url(r'^maintenance/', include('maintenance.urls', namespace='maintenance')))
if settings.DEBUG:
try:
from .urls_dev import urlpatterns as dev_urlpatterns
urlpatterns += dev_urlpatterns
except ImportError:
pass
urlpatterns += static(
settings.VIDEO_IMAGE_SETTINGS['STORAGE_KWARGS']['base_url'],
document_root=settings.VIDEO_IMAGE_SETTINGS['STORAGE_KWARGS']['location']
)
urlpatterns += static(
settings.VIDEO_TRANSCRIPTS_SETTINGS['STORAGE_KWARGS']['base_url'],
document_root=settings.VIDEO_TRANSCRIPTS_SETTINGS['STORAGE_KWARGS']['location']
)
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns.append(url(r'^__debug__/', include(debug_toolbar.urls)))
# UX reference templates
urlpatterns.append(url(r'^template/(?P<template>.+)$', openedx.core.djangoapps.debug.views.show_reference_template,
name='openedx.core.djangoapps.debug.views.show_reference_template'))
# display error page templates, for testing purposes
urlpatterns += [
url(r'^404$', handler404),
url(r'^500$', handler500),
]
if settings.FEATURES.get('ENABLE_API_DOCS'):
urlpatterns += [
url(r'^api-docs/$', get_swagger_view(title='Studio API')),
]
from openedx.core.djangoapps.plugins import constants as plugin_constants, plugin_urls
urlpatterns.extend(plugin_urls.get_patterns(plugin_constants.ProjectType.CMS))
| agpl-3.0 | -4,871,503,868,721,324,000 | 52.010989 | 119 | 0.697208 | false |
Bouke/django-user-sessions | user_sessions/management/commands/migratesessions.py | 1 | 2472 | # -*- coding: UTF-8 -*-
import importlib
import logging
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from user_sessions.models import Session as UserSession
logger = logging.getLogger(__name__)
def get_model_class(full_model_name):
try:
old_model_package, old_model_class_name = full_model_name.rsplit('.', 1)
package = importlib.import_module(old_model_package)
return getattr(package, old_model_class_name)
except RuntimeError as e:
        if 'INSTALLED_APPS' in str(e):
raise RuntimeError(
"To run this command, temporarily append '{model}' to settings.INSTALLED_APPS"
.format(model=old_model_package.rsplit('.models')[0]))
raise
class Command(BaseCommand):
"""
Convert existing (old) sessions to the user_sessions SessionStore.
If you have an operational site and switch to user_sessions, you might want to keep your
    active users logged in. The old sessions are read from the model class named by the --oldmodel option.
This command creates a `user_session.Session` object for each session of the previous model.
"""
def add_arguments(self, parser):
parser.add_argument(
'--oldmodel',
dest='oldmodel',
default='django.contrib.sessions.models.Session',
help='Existing session model to migrate to the new UserSessions database table'
)
def handle(self, *args, **options):
User = get_user_model()
old_sessions = get_model_class(options['oldmodel']).objects.all()
logger.info("Processing %d session objects" % old_sessions.count())
conversion_count = 0
for old_session in old_sessions:
if not UserSession.objects.filter(session_key=old_session.session_key).exists():
data = old_session.get_decoded()
user = None
if '_auth_user_id' in data:
user = User.objects.filter(pk=data['_auth_user_id']).first()
UserSession.objects.create(
session_key=old_session.session_key,
session_data=old_session.session_data,
expire_date=old_session.expire_date,
user=user,
ip='127.0.0.1'
)
conversion_count += 1
logger.info("Created %d new session objects" % conversion_count)
| mit | -1,303,204,494,783,075,600 | 38.870968 | 97 | 0.619741 | false |
rjungbeck/rasterizer | servicebase.py | 1 | 3168 | import argparse
import json
import multiprocessing
import os
import logging
import win32api
import win32service
import win32serviceutil
cmdline_style="pywin32"
logger=logging.getLogger("servicebase")
class ServiceBase(win32serviceutil.ServiceFramework):
_svc_name_ = "RsjService"
_svc_display_name_ = "RSJ Service"
_svc_deps_=[]
epilog="(C) Copyright 2013-2014 by RSJ Software GmbH Germering. All rights reserved."
options={}
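	# Subclasses populate `options` with argparse-style entries, e.g.
	#   {'--myopt': {'default': None, 'help': '...', 'name': 'myopt'}}
	# (shape inferred from ServiceMain and customOptionHandler; the example
	# key and values are placeholders).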
def __init__(self, args=None):
if args:
#self._svc_name_=args[0]
try:
win32serviceutil.ServiceFramework.__init__(self, args)
except:
pass
def SvcDoRun(self):
import servicemanager
servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE,servicemanager.PYS_SERVICE_STARTED,(self._svc_name_, ''))
directory=self.getOption("directory")
if directory:
os.chdir(directory)
self.ServiceRun()
servicemanager.LogInfoMsg("%s - STOPPED!" %(self._svc_display_name_,))
def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
self.ServiceStop()
def ServiceMain(self):
multiprocessing.freeze_support()
win32api.SetConsoleCtrlHandler(self.ctrlHandler, True)
parser=argparse.ArgumentParser(self._svc_display_name_, epilog=self.epilog, fromfile_prefix_chars="@")
customInstallOptions=""
for k,v in self.options.iteritems():
customInstallOptions+=k[1:]+":"
parser.add_argument(k, type=str, default=v.get("default", None),help=v.get("help", None))
parser.add_argument("--username", type=str, default=None, help="User name")
parser.add_argument("--password", type=str, default=None, help="Password")
parser.add_argument("--startup", type=str, default="manual", help="Startup type (auto, manual, disabled)")
subparsers=parser.add_subparsers(help="Subcommands")
parserInstall=subparsers.add_parser("install", help="Install Service")
parserUninstall=subparsers.add_parser("remove", help="Remove Service")
parserConfig=subparsers.add_parser("update", help="Update Service")
parserDebug=subparsers.add_parser("debug", help="Debug")
parserStart=subparsers.add_parser("start", help="Start Service")
parserStop=subparsers.add_parser("stop", help="Stop Service")
parserRestart=subparsers.add_parser("restart", help="Restart Service")
self.__name__=self.__class__.__name__
win32serviceutil.HandleCommandLine(self,customInstallOptions=customInstallOptions, customOptionHandler=self.customOptionHandler)
def ServiceRun(self):
pass
def ServiceStop(self):
pass
def ctrlHandler(self, ctrlType):
return True
def customOptionHandler(self, opts):
logger.debug(opts)
for opt,val in opts:
if opt in self.options:
if "name" in self.options[opt]:
self.setOption(self.options[opt]["name"], val)
self.setOption("directory", os.getcwd())
def setOption(self, name, val):
win32serviceutil.SetServiceCustomOption(self, name, val)
def getOption(self, name, default=None):
return win32serviceutil.GetServiceCustomOption(self, name, default)
| agpl-3.0 | 4,275,224,004,615,823,400 | 27.886792 | 130 | 0.703598 | false |
beeftornado/sentry | tests/sentry_plugins/victorops/test_plugin.py | 2 | 3778 | from __future__ import absolute_import
import responses
from exam import fixture
from sentry.models import Rule
from sentry.plugins.base import Notification
from sentry.testutils import PluginTestCase
from sentry.utils import json
from sentry_plugins.victorops.plugin import VictorOpsPlugin
SUCCESS = """{
"result":"success",
"entity_id":"86dc4115-72d3-4219-9d8e-44939c1c409d"
}"""
class UnicodeTestInterface(object):
def __init__(self, title, body):
self.title = title
self.body = body
def to_string(self, event):
return self.body
def get_title(self):
return self.title
class VictorOpsPluginTest(PluginTestCase):
@fixture
def plugin(self):
return VictorOpsPlugin()
def test_conf_key(self):
assert self.plugin.conf_key == "victorops"
def test_entry_point(self):
self.assertPluginInstalled("victorops", self.plugin)
def test_is_configured(self):
assert self.plugin.is_configured(self.project) is False
self.plugin.set_option("api_key", "abcdef", self.project)
assert self.plugin.is_configured(self.project) is True
@responses.activate
def test_simple_notification(self):
responses.add(
"POST",
"https://alert.victorops.com/integrations/generic/20131114/alert/secret-api-key/everyone",
body=SUCCESS,
)
self.plugin.set_option("api_key", "secret-api-key", self.project)
self.plugin.set_option("routing_key", "everyone", self.project)
event = self.store_event(
data={
"message": "Hello world",
"level": "warning",
"culprit": "foo.bar",
"platform": "python",
"stacktrace": {
"frames": [
{
"filename": "sentry/models/foo.py",
"context_line": " string_max_length=self.string_max_length)",
"function": "build_msg",
"lineno": 29,
}
]
},
},
project_id=self.project.id,
)
group = event.group
rule = Rule.objects.create(project=self.project, label="my rule")
notification = Notification(event=event, rule=rule)
with self.options({"system.url-prefix": "http://example.com"}):
self.plugin.notify(notification)
request = responses.calls[0].request
payload = json.loads(request.body)
assert {
"message_type": "WARNING",
"entity_id": group.id,
"entity_display_name": "Hello world",
"monitoring_tool": "sentry",
"state_message": 'Stacktrace\n-----------\n\nStacktrace (most recent call last):\n\n File "sentry/models/foo.py", line 29, in build_msg\n string_max_length=self.string_max_length)\n\nMessage\n-----------\n\nHello world',
"timestamp": int(event.datetime.strftime("%s")),
"issue_url": "http://example.com/organizations/baz/issues/%s/" % group.id,
"issue_id": group.id,
"project_id": group.project.id,
} == payload
def test_build_description_unicode(self):
event = self.store_event(
data={"message": u"abcd\xde\xb4", "culprit": "foo.bar", "level": "error"},
project_id=self.project.id,
)
event.interfaces = {
u"Message": UnicodeTestInterface(u"abcd\xde\xb4", u"\xdc\xea\x80\x80abcd\xde\xb4")
}
description = self.plugin.build_description(event)
assert description == u"abcd\xde\xb4\n-----------\n\n\xdc\xea\x80\x80abcd\xde\xb4"
| bsd-3-clause | 8,218,235,582,282,228,000 | 33.66055 | 236 | 0.567761 | false |
cdegroc/scikit-learn | examples/covariance/plot_outlier_detection.py | 2 | 3882 | """
==========================================
Outlier detection with several methods.
==========================================
This example illustrates two ways of performing :ref:`outlier_detection`
when the amount of contamination is known:
- based on a robust estimator of covariance, which is assuming that the
data are Gaussian distributed and performs better than the One-Class SVM
in that case.
- using the One-Class SVM and its ability to capture the shape of the
data set, hence performing better when the data is strongly
non-Gaussian, i.e. with two well-separated clusters;
The ground truth about inliers and outliers is given by the points colors
while the orange-filled area indicates which points are reported as outliers
by each method.
Here, we assume that we know the fraction of outliers in the datasets.
Thus rather than using the 'predict' method of the objects, we set the
threshold on the decision_function to separate out the corresponding
fraction.
"""
print __doc__
import numpy as np
import pylab as pl
import matplotlib.font_manager
from scipy import stats
from sklearn import svm
from sklearn.covariance import EllipticEnvelop
# Example settings
n_samples = 200
outliers_fraction = 0.25
clusters_separation = [0, 1, 2]
# define two outlier detection tools to be compared
classifiers = {
"One-Class SVM": svm.OneClassSVM(nu=0.95 * outliers_fraction + 0.05,
kernel="rbf", gamma=0.1),
"robust covariance estimator": EllipticEnvelop(contamination=.1),
}
# Compare given classifiers under given settings
xx, yy = np.meshgrid(np.linspace(-7, 7, 500), np.linspace(-7, 7, 500))
n_inliers = int((1. - outliers_fraction) * n_samples)
n_outliers = int(outliers_fraction * n_samples)
ground_truth = np.ones(n_samples, dtype=int)
ground_truth[-n_outliers:] = 0
# Fit the problem with varying cluster separation
for i, offset in enumerate(clusters_separation):
np.random.seed(42)
# Data generation
X1 = 0.3 * np.random.randn(0.5 * n_inliers, 2) - offset
X2 = 0.3 * np.random.randn(0.5 * n_inliers, 2) + offset
X = np.r_[X1, X2]
# Add outliers
X = np.r_[X, np.random.uniform(low=-6, high=6, size=(n_outliers, 2))]
# Fit the model with the One-Class SVM
pl.figure(figsize=(10, 5))
pl.set_cmap(pl.cm.Blues_r)
for i, (clf_name, clf) in enumerate(classifiers.iteritems()):
# fit the data and tag outliers
clf.fit(X)
y_pred = clf.decision_function(X).ravel()
threshold = stats.scoreatpercentile(y_pred,
100 * outliers_fraction)
y_pred = y_pred > threshold
n_errors = (y_pred != ground_truth).sum()
# plot the levels lines and the points
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
subplot = pl.subplot(1, 2, i + 1)
subplot.set_title("Outlier detection")
subplot.contourf(xx, yy, Z,
levels=np.linspace(Z.min(), threshold, 7))
a = subplot.contour(xx, yy, Z, levels=[threshold],
linewidths=2, colors='red')
subplot.contourf(xx, yy, Z, levels=[threshold, Z.max()],
colors='orange')
b = subplot.scatter(X[:-n_outliers, 0], X[:-n_outliers, 1], c='white')
c = subplot.scatter(X[-n_outliers:, 0], X[-n_outliers:, 1], c='black')
subplot.axis('tight')
subplot.legend(
[a.collections[0], b, c],
['learned decision function', 'true inliers', 'true outliers'],
prop=matplotlib.font_manager.FontProperties(size=11))
subplot.set_xlabel("%d. %s (errors: %d)" % (i + 1, clf_name, n_errors))
subplot.set_xlim((-7, 7))
subplot.set_ylim((-7, 7))
pl.subplots_adjust(0.04, 0.1, 0.96, 0.94, 0.1, 0.26)
pl.show()
| bsd-3-clause | -2,521,126,800,319,898,000 | 37.82 | 79 | 0.62983 | false |
bgewehr/RPiMower | sens_groundCam.py | 1 | 1775 | #!/usr/bin/python
__author__ = 'mp911de'
import time
import os,sys
import picamera
import picamera.array
import time
import numpy as np
import lib_mqtt as MQTT
from math import sqrt, atan2, degrees
DEBUG = False
def get_colour_name(rgb):
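    # Classify an averaged RGB sample into a coarse colour name: normalise to
    # [0, 1], derive a hue angle in degrees, and use mean/std brightness to
    # separate white, black and grey from the chromatic colours.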
rgb = rgb / 255
alpha = (2 * rgb[0] - rgb[1] - rgb [2])/2
beta = sqrt(3)/2*(rgb[1] - rgb[2])
hue = int(degrees(atan2(beta, alpha)))
std = np.std(rgb)
mean = np.mean(rgb)
if hue < 0:
hue = hue + 360
if std < 0.055:
if mean > 0.85:
colour = "white"
elif mean < 0.15:
colour = "black"
else:
colour = "grey"
elif (hue > 50) and (hue <= 160):
colour = "green"
elif (hue > 160) and (hue <= 250):
colour = "blue"
else:
colour = "red"
if DEBUG:
print rgb, hue, std, mean, colour
return colour
if __name__ == '__main__':
# os.nice(10)
try:
MQTT.init()
while True:
with picamera.PiCamera() as camera:
with picamera.array.PiRGBArray(camera) as stream:
camera.start_preview()
camera.resolution = (100, 100)
for foo in camera.capture_continuous(stream, 'rgb', use_video_port=False, resize=None, splitter_port=0, burst=True):
stream.truncate()
stream.seek(0)
RGBavg = stream.array.mean(axis=0).mean(axis=0)
colour = get_colour_name(RGBavg)
MQTT.mqttc.publish("/RPiMower/Ground_Colour", colour)
# interrupt
except KeyboardInterrupt:
print("Programm interrupted")
camera.stop_preview()
MQTT.cleanup()
sys.exit(2)
| gpl-2.0 | 3,489,056,492,993,856,500 | 26.307692 | 137 | 0.522817 | false |
pcampese/codewars | next_bigger3.py | 1 | 1877 | # https://www.codewars.com/kata/next-bigger-number-with-the-same-digits/train/python
def next_bigger(n):
import itertools
# Print the arguments
print('n = {}'.format(n))
# Define default result as -1
result = -1
# Convert the number to a list of digits
numbers = [int(d) for d in str(n)]
print('numbers = {}'.format(numbers))
# Save a permanent copy of the original numbers list
number_vault = numbers[:]
# Create next largest number
# Start from right to left
# Goal is to keep as many as the left most digits as possible, as they are and
# for the right-most digits, sort as few as possible (sorted from low to high)
# Current number sorted
numbers_sorted = sorted(numbers)
print('numbers_sorted = {}'.format(numbers_sorted))
# Get the number length
number_length = len(numbers)
# For length of 2
if (number_length == 2):
sorted_number = int(''.join([str(d) for d in sorted(numbers)]))
if (sorted_number > n):
result = sorted_number
else:
element = numbers.pop(1)
print(element)
numbers.insert(0, element)
print(numbers)
result = list_to_int(numbers)
print(result)
# For length of 3
elif (number_length >= 3):
numbers_on_right = next_bigger(list_to_int(numbers[1:]))
if numbers_on_right >= 0:
result = list(str(numbers_on_right))
result.insert(0,numbers[0])
result = list_to_int(result)
print('result = {}'.format(result))
return result
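# Note: the branches above only handle short inputs directly; the general
# solution to this kata is the classic "next permutation" step (find the
# rightmost ascent, swap it with the next-larger digit to its right, then
# sort the suffix ascending).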
def swap(array, item1, item2):
index_1 = array.index(item1)
index_2 = array.index(item2)
array[index_1], array[index_2] = array[index_2], array[index_1]
return array
def shift_left(arr, index):
print('Array = {}'.format(arr))
print('Index = {}'.format(index))
	element = arr.pop(index)
	arr.insert(index - 1, element)
def list_to_int(numbers):
	return int(''.join([str(d) for d in numbers]))
| gpl-3.0 | -2,691,056,460,727,516,700 | 25.083333 | 84 | 0.67821 | false |
IATI/CSV2IATI-backend | csviati/codes.py | 1 | 4999 | crs_country = {
'130': ('DZ', 'Algeria'),
'133': ('LY', 'Libya'),
'136': ('MA', 'Morocco'),
'139': ('TN', 'Tunisia'),
'142': ('EG', 'Egypt'),
'218': ('ZA', 'South Africa'),
'225': ('AO', 'Angola'),
'227': ('BW', 'Botswana'),
'228': ('BI', 'Burundi'),
'229': ('CM', 'Cameroon'),
'230': ('CV', 'Cape Verde'),
'231': ('CF', 'Central African Rep.'),
'232': ('TD', 'Chad'),
'233': ('KM', 'Comoros'),
'234': ('CG', 'Congo, Rep.'),
'235': ('CD', 'Congo, Dem. Rep.'),
'236': ('BJ', 'Benin'),
'238': ('ET', 'Ethiopia'),
'239': ('GA', 'Gabon'),
'240': ('GM', 'Gambia'),
'241': ('GH', 'Ghana'),
'243': ('GN', 'Guinea'),
'244': ('GW', 'Guinea-Bissau'),
'245': ('GQ', 'Equatorial Guinea'),
'247': ('CI', "Cote d'Ivoire"),
'248': ('KE', 'Kenya'),
'249': ('LS', 'Lesotho'),
'251': ('LR', 'Liberia'),
'252': ('MG', 'Madagascar'),
'253': ('MW', 'Malawi'),
'255': ('ML', 'Mali'),
'256': ('MR', 'Mauritania'),
'257': ('MU', 'Mauritius'),
'258': ('YT', 'Mayotte'),
'259': ('MZ', 'Mozambique'),
'260': ('NE', 'Niger'),
'261': ('NG', 'Nigeria'),
'265': ('ZW', 'Zimbabwe'),
'266': ('RW', 'Rwanda'),
'268': ('ST', 'Sao Tome & Principe'),
'269': ('SN', 'Senegal'),
'270': ('SC', 'Seychelles'),
'271': ('ER', 'Eritrea'),
'272': ('SL', 'Sierra Leone'),
'273': ('SO', 'Somalia'),
'274': ('DJ', 'Djibouti'),
'275': ('NA', 'Namibia'),
'276': ('SH', 'St. Helena'),
'278': ('SD', 'Sudan'),
'280': ('SZ', 'Swaziland'),
'282': ('TZ', 'Tanzania'),
'283': ('TG', 'Togo'),
'285': ('UG', 'Uganda'),
'287': ('BF', 'Burkina Faso'),
'288': ('ZM', 'Zambia'),
'329': ('BB', 'Barbados'),
'336': ('CR', 'Costa Rica'),
'338': ('CU', 'Cuba'),
'340': ('DO', 'Dominican Republic'),
'342': ('SV', 'El Salvador'),
'347': ('GT', 'Guatemala'),
'349': ('HT', 'Haiti'),
'351': ('HN', 'Honduras'),
'352': ('BZ', 'Belize'),
'354': ('JM', 'Jamaica'),
'358': ('MX', 'Mexico'),
'364': ('NI', 'Nicaragua'),
'366': ('PA', 'Panama'),
'375': ('TT', 'Trinidad and Tobago'),
'376': ('AI', 'Anguilla'),
'377': ('AG', 'Antigua and Barbuda'),
'378': ('DM', 'Dominica'),
'381': ('GD', 'Grenada'),
'382': ('KN', 'St. Kitts-Nevis'),
'383': ('LC', 'St. Lucia'),
'384': ('VC', 'St.Vincent & Grenadines'),
'385': ('MS', 'Montserrat'),
'425': ('AR', 'Argentina'),
'428': ('BO', 'Bolivia'),
'431': ('BR', 'Brazil'),
'434': ('CL', 'Chile'),
'437': ('CO', 'Colombia'),
'440': ('EC', 'Ecuador'),
'446': ('GY', 'Guyana'),
'451': ('PY', 'Paraguay'),
'454': ('PE', 'Peru'),
'457': ('SR', 'Suriname'),
'460': ('UY', 'Uruguay'),
'463': ('VE', 'Venezuela'),
'540': ('IR', 'Iran'),
'543': ('IQ', 'Iraq'),
'549': ('JO', 'Jordan'),
'55': ('TR', 'Turkey'),
'550': ('PS', 'Palestinian Adm. Areas'),
'555': ('LB', 'Lebanon'),
'558': ('OM', 'Oman'),
'57': ('XK', 'Kosovo'),
'573': ('SY', 'Syria'),
'580': ('YE', 'Yemen'),
'610': ('AM', 'Armenia'),
'611': ('AZ', 'Azerbaijan'),
'612': ('GE', 'Georgia'),
'613': ('KZ', 'Kazakhstan'),
'614': ('KG', 'Kyrgyz Republic'),
'615': ('TJ', 'Tajikistan'),
'616': ('TM', 'Turkmenistan'),
'617': ('UZ', 'Uzbekistan'),
'62': ('HR', 'Croatia'),
'625': ('AF', 'Afghanistan'),
'63': ('RS', 'Serbia'),
'630': ('BT', 'Bhutan'),
'635': ('MM', 'Myanmar'),
'64': ('BA', 'Bosnia-Herzegovina'),
'640': ('LK', 'Sri Lanka'),
'645': ('IN', 'India'),
'65': ('ME', 'Montenegro'),
'655': ('MV', 'Maldives'),
'66': ('MK', 'Macedonia, FYR'),
'660': ('NP', 'Nepal'),
'665': ('PK', 'Pakistan'),
'666': ('BD', 'Bangladesh'),
'71': ('AL', 'Albania'),
'728': ('KH', 'Cambodia'),
'730': ('CN', 'China'),
'738': ('ID', 'Indonesia'),
'740': ('KP', 'Korea, Dem. Rep.'),
'745': ('LA', 'Laos'),
'751': ('MY', 'Malaysia'),
'753': ('MN', 'Mongolia'),
'755': ('PH', 'Philippines'),
'764': ('TH', 'Thailand'),
'765': ('TL', 'Timor-Leste'),
'769': ('VN', 'Viet Nam'),
'831': ('CK', 'Cook Islands'),
'832': ('FJ', 'Fiji'),
'836': ('KI', 'Kiribati'),
'845': ('NR', 'Nauru'),
'85': ('UA', 'Ukraine'),
'854': ('VU', 'Vanuatu'),
'856': ('NU', 'Niue'),
'859': ('MH', 'Marshall Islands'),
'86': ('BY', 'Belarus'),
'860': ('FM', 'Micronesia, Fed. States'),
'861': ('PW', 'Palau'),
'862': ('PG', 'Papua New Guinea'),
'866': ('SB', 'Solomon Islands'),
'868': ('TK', 'Tokelau'),
'870': ('TO', 'Tonga'),
'872': ('TV', 'Tuvalu'),
'876': ('WF', 'Wallis & Futuna'),
'880': ('WS', 'Samoa'),
'93': ('MD', 'Moldova'),
'Recipient code': ('ISO-2', 'Recipient name (EN)')
}
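# Example lookups (added comment, not in the original module): DAC/CRS
# recipient codes map to (ISO-2 code, recipient name) pairs, and region
# codes map to region names, e.g.
#   crs_country['130']  ->  ('DZ', 'Algeria')
#   crs_region['298']   ->  'Africa, regional'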
crs_region = {
'189': 'North of Sahara, regional',
'289': 'South of Sahara, regional',
'298': 'Africa, regional',
'380': 'West Indies, regional',
'389': 'North & Central America, regional',
'489': 'South America, regional',
'498': 'America, regional',
'589': 'Middle East, regional',
'619': 'Central Asia, regional',
'679': 'South Asia, regional',
'689': 'South & Central Asia, regional',
'789': 'Far East Asia, regional',
'798': 'Asia, regional',
'88': 'States Ex-Yugoslavia',
'889': 'Oceania, regional',
'89': 'Europe, regional',
'998': 'Bilateral, unspecified'
}
| mit | -8,177,016,022,128,188,000 | 27.565714 | 51 | 0.481896 | false |
pytroll/pytroll-schedule | trollsched/spherical.py | 1 | 11055 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013, 2014, 2015, 2018 Martin Raspaud
# Author(s):
# Martin Raspaud <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Some generalized spherical functions.
The base type is a numpy array of size (n, 2) (2 for lon and lat).
"""
import numpy as np
import pyresample.spherical
import logging
logger = logging.getLogger(__name__)
class SCoordinate(object):
"""Spherical coordinates
"""
def __init__(self, lon, lat):
self.lon = lon
self.lat = lat
def cross2cart(self, point):
"""Compute the cross product, and convert to cartesian coordinates
"""
lat1 = self.lat
lon1 = self.lon
lat2 = point.lat
lon2 = point.lon
ad = np.sin(lat1 - lat2) * np.cos((lon1 - lon2) / 2.0)
be = np.sin(lat1 + lat2) * np.sin((lon1 - lon2) / 2.0)
c = np.sin((lon1 + lon2) / 2.0)
f = np.cos((lon1 + lon2) / 2.0)
g = np.cos(lat1)
h = np.cos(lat2)
i = np.sin(lon2 - lon1)
res = CCoordinate(np.array([-ad * c + be * f,
ad * f + be * c,
g * h * i]))
return res
def to_cart(self):
"""Convert to cartesian.
"""
return CCoordinate(np.array([np.cos(self.lat) * np.cos(self.lon),
np.cos(self.lat) * np.sin(self.lon),
np.sin(self.lat)]))
def distance(self, point):
"""Vincenty formula.
"""
dlambda = self.lon - point.lon
num = ((np.cos(point.lat) * np.sin(dlambda)) ** 2 +
(np.cos(self.lat) * np.sin(point.lat) -
np.sin(self.lat) * np.cos(point.lat) *
np.cos(dlambda)) ** 2)
den = (np.sin(self.lat) * np.sin(point.lat) +
np.cos(self.lat) * np.cos(point.lat) * np.cos(dlambda))
return np.arctan2(num ** .5, den)
def hdistance(self, point):
"""Haversine formula
"""
return 2 * np.arcsin((np.sin((point.lat - self.lat) / 2.0) ** 2.0 +
np.cos(point.lat) * np.cos(self.lat) *
np.sin((point.lon - self.lon) / 2.0) ** 2.0) ** .5)
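    # Example (added comment): the arc between the north pole and a point
    # on the equator is a quarter circle, so
    #   SCoordinate(0.0, np.pi / 2).hdistance(SCoordinate(0.0, 0.0))
    # returns np.pi / 2.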
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
return np.allclose((self.lon, self.lat), (other.lon, other.lat))
def __str__(self):
return str((np.rad2deg(self.lon), np.rad2deg(self.lat)))
def __repr__(self):
return str((np.rad2deg(self.lon), np.rad2deg(self.lat)))
def __iter__(self):
return [self.lon, self.lat].__iter__()
class CCoordinate(object):
"""Cartesian coordinates
"""
def __init__(self, cart):
self.cart = np.array(cart)
def norm(self):
"""Euclidean norm of the vector.
"""
return np.sqrt(np.einsum('...i, ...i', self.cart, self.cart))
def normalize(self):
"""normalize the vector.
"""
self.cart /= np.sqrt(np.einsum('...i, ...i', self.cart, self.cart))
return self
def cross(self, point):
"""cross product with another vector.
"""
return CCoordinate(np.cross(self.cart, point.cart))
def dot(self, point):
"""dot product with another vector.
"""
return np.inner(self.cart, point.cart)
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
return np.allclose(self.cart, other.cart)
def __str__(self):
return str(self.cart)
def __repr__(self):
return str(self.cart)
def __add__(self, other):
try:
return CCoordinate(self.cart + other.cart)
except AttributeError:
return CCoordinate(self.cart + np.array(other))
def __radd__(self, other):
return self.__add__(other)
def __mul__(self, other):
try:
return CCoordinate(self.cart * other.cart)
except AttributeError:
return CCoordinate(self.cart * np.array(other))
def __rmul__(self, other):
return self.__mul__(other)
def to_spherical(self):
return SCoordinate(np.arctan2(self.cart[1], self.cart[0]),
np.arcsin(self.cart[2]))
EPSILON = 0.0000001
def modpi(val, mod=np.pi):
"""Puts *val* between -*mod* and *mod*.
"""
return (val + mod) % (2 * mod) - mod
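# Example (added comment): modpi wraps angles into [-pi, pi), e.g.
#   modpi(3 * np.pi / 2)  ->  -np.pi / 2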
class Arc(object):
"""An arc of the great circle between two points.
"""
start = None
end = None
def __init__(self, start, end):
self.start, self.end = start, end
def __eq__(self, other):
if(self.start == other.start and self.end == other.end):
return 1
return 0
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return (str(self.start) + " -> " + str(self.end))
def __repr__(self):
return (str(self.start) + " -> " + str(self.end))
def angle(self, other_arc):
"""Oriented angle between two arcs.
"""
if self.start == other_arc.start:
a__ = self.start
b__ = self.end
c__ = other_arc.end
elif self.start == other_arc.end:
a__ = self.start
b__ = self.end
c__ = other_arc.start
elif self.end == other_arc.end:
a__ = self.end
b__ = self.start
c__ = other_arc.start
elif self.end == other_arc.start:
a__ = self.end
b__ = self.start
c__ = other_arc.end
else:
raise ValueError("No common point in angle computation.")
ua_ = a__.cross2cart(b__)
ub_ = a__.cross2cart(c__)
val = ua_.dot(ub_) / (ua_.norm() * ub_.norm())
if abs(val - 1) < EPSILON:
angle = 0
elif abs(val + 1) < EPSILON:
angle = np.pi
else:
angle = np.arccos(val)
n__ = ua_.normalize()
if n__.dot(c__.to_cart()) > 0:
return -angle
else:
return angle
def intersections(self, other_arc):
"""Gives the two intersections of the greats circles defined by the
current arc and *other_arc*.
From http://williams.best.vwh.net/intersect.htm
"""
if self.end.lon - self.start.lon > np.pi:
self.end.lon -= 2 * np.pi
if other_arc.end.lon - other_arc.start.lon > np.pi:
other_arc.end.lon -= 2 * np.pi
if self.end.lon - self.start.lon < -np.pi:
self.end.lon += 2 * np.pi
if other_arc.end.lon - other_arc.start.lon < -np.pi:
other_arc.end.lon += 2 * np.pi
ea_ = self.start.cross2cart(self.end).normalize()
eb_ = other_arc.start.cross2cart(other_arc.end).normalize()
cross = ea_.cross(eb_)
lat = np.arctan2(cross.cart[2],
np.sqrt(cross.cart[0] ** 2 + cross.cart[1] ** 2))
lon = np.arctan2(cross.cart[1], cross.cart[0])
return (SCoordinate(lon, lat),
SCoordinate(modpi(lon + np.pi), -lat))
def intersects(self, other_arc):
"""Says if two arcs defined by the current arc and the *other_arc*
intersect. An arc is defined as the shortest tracks between two points.
"""
return bool(self.intersection(other_arc))
def intersection(self, other_arc):
"""Says where, if two arcs defined by the current arc and the
*other_arc* intersect. An arc is defined as the shortest tracks between
two points.
"""
if self == other_arc:
return None
# if (self.end == other_arc.start or
# self.end == other_arc.end or
# self.start == other_arc.start or
# self.start == other_arc.end):
# return None
for i in self.intersections(other_arc):
a__ = self.start
b__ = self.end
c__ = other_arc.start
d__ = other_arc.end
ab_ = a__.hdistance(b__)
cd_ = c__.hdistance(d__)
if(((i in (a__, b__)) or
(abs(a__.hdistance(i) + b__.hdistance(i) - ab_) < EPSILON)) and
((i in (c__, d__)) or
(abs(c__.hdistance(i) + d__.hdistance(i) - cd_) < EPSILON))):
return i
return None
def get_next_intersection(self, arcs, known_inter=None):
"""Get the next intersection between the current arc and *arcs*
"""
res = []
for arc in arcs:
inter = self.intersection(arc)
if (inter is not None and
inter != arc.end and
inter != self.end):
res.append((inter, arc))
def dist(args):
"""distance key.
"""
return self.start.distance(args[0])
take_next = False
for inter, arc in sorted(res, key=dist):
if known_inter is not None:
if known_inter == inter:
take_next = True
elif take_next:
return inter, arc
else:
return inter, arc
return None, None
class SphPolygon(pyresample.spherical.SphPolygon):
def draw(self, mapper, options, **more_options):
lons = np.rad2deg(self.lon.take(np.arange(len(self.lon) + 1),
mode="wrap"))
lats = np.rad2deg(self.lat.take(np.arange(len(self.lat) + 1),
mode="wrap"))
rx, ry = mapper(lons, lats)
mapper.plot(rx, ry, options, **more_options)
def get_twilight_poly(utctime):
"""Return a polygon enclosing the sunlit part of the globe at *utctime*.
"""
from pyorbital import astronomy
ra, dec = astronomy.sun_ra_dec(utctime)
lon = modpi(ra - astronomy.gmst(utctime))
lat = dec
vertices = np.zeros((4, 2))
vertices[0, :] = modpi(lon - np.pi / 2), 0
if lat <= 0:
vertices[1, :] = lon, np.pi / 2 + lat
vertices[3, :] = modpi(lon + np.pi), -(np.pi / 2 + lat)
else:
vertices[1, :] = modpi(lon + np.pi), np.pi / 2 - lat
vertices[3, :] = lon, -(np.pi / 2 - lat)
vertices[2, :] = modpi(lon + np.pi / 2), 0
return SphPolygon(vertices)
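# Example (added comment): polygon enclosing the currently sunlit
# hemisphere:
#   from datetime import datetime
#   poly = get_twilight_poly(datetime.utcnow())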
| gpl-3.0 | 8,368,029,202,223,411,000 | 28.717742 | 81 | 0.520669 | false |
valexandersaulys/airbnb_kaggle_contest | venv/lib/python3.4/site-packages/keras/preprocessing/image.py | 1 | 7684 | from __future__ import absolute_import
import numpy as np
import re
from scipy import ndimage
from scipy import linalg
from os import listdir
from os.path import isfile, join
import random, math
from six.moves import range
'''
Fairly basic set of tools for realtime data augmentation on image data.
Can easily be extended to include new transforms, new preprocessing methods, etc...
'''
def random_rotation(x, rg, fill_mode="nearest", cval=0.):
angle = random.uniform(-rg, rg)
x = ndimage.interpolation.rotate(x, angle, axes=(1,2), reshape=False, mode=fill_mode, cval=cval)
return x
def random_shift(x, wrg, hrg, fill_mode="nearest", cval=0.):
crop_left_pixels = 0
crop_right_pixels = 0
crop_top_pixels = 0
crop_bottom_pixels = 0
original_w = x.shape[1]
original_h = x.shape[2]
if wrg:
crop = random.uniform(0., wrg)
split = random.uniform(0, 1)
crop_left_pixels = int(split*crop*x.shape[1])
crop_right_pixels = int((1-split)*crop*x.shape[1])
if hrg:
crop = random.uniform(0., hrg)
split = random.uniform(0, 1)
crop_top_pixels = int(split*crop*x.shape[2])
crop_bottom_pixels = int((1-split)*crop*x.shape[2])
x = ndimage.interpolation.shift(x, (0, crop_left_pixels, crop_top_pixels), mode=fill_mode, cval=cval)
return x
def horizontal_flip(x):
for i in range(x.shape[0]):
x[i] = np.fliplr(x[i])
return x
def vertical_flip(x):
for i in range(x.shape[0]):
x[i] = np.flipud(x[i])
return x
def random_barrel_transform(x, intensity):
# TODO
pass
def random_shear(x, intensity):
# TODO
pass
def random_channel_shift(x, rg):
# TODO
pass
def random_zoom(x, rg, fill_mode="nearest", cval=0.):
zoom_w = random.uniform(1.-rg, 1.)
zoom_h = random.uniform(1.-rg, 1.)
x = ndimage.interpolation.zoom(x, zoom=(1., zoom_w, zoom_h), mode=fill_mode, cval=cval)
return x # shape of result will be different from shape of input!
def array_to_img(x, scale=True):
from PIL import Image
x = x.transpose(1, 2, 0)
if scale:
x += max(-np.min(x), 0)
x /= np.max(x)
x *= 255
if x.shape[2] == 3:
# RGB
return Image.fromarray(x.astype("uint8"), "RGB")
else:
# grayscale
return Image.fromarray(x[:,:,0].astype("uint8"), "L")
def img_to_array(img):
x = np.asarray(img, dtype='float32')
if len(x.shape)==3:
# RGB: height, width, channel -> channel, height, width
x = x.transpose(2, 0, 1)
else:
# grayscale: height, width -> channel, height, width
x = x.reshape((1, x.shape[0], x.shape[1]))
return x
def load_img(path, grayscale=False):
from PIL import Image
img = Image.open(path)
if grayscale:
img = img.convert('L')
else: # Assure 3 channel even when loaded image is grayscale
img = img.convert('RGB')
return img
def list_pictures(directory, ext='jpg|jpeg|bmp|png'):
return [join(directory,f) for f in listdir(directory) \
if isfile(join(directory,f)) and re.match('([\w]+\.(?:' + ext + '))', f)]
class ImageDataGenerator(object):
'''
Generate minibatches with
realtime data augmentation.
'''
def __init__(self,
featurewise_center=True, # set input mean to 0 over the dataset
samplewise_center=False, # set each sample mean to 0
featurewise_std_normalization=True, # divide inputs by std of the dataset
samplewise_std_normalization=False, # divide each input by its std
zca_whitening=False, # apply ZCA whitening
rotation_range=0., # degrees (0 to 180)
width_shift_range=0., # fraction of total width
height_shift_range=0., # fraction of total height
horizontal_flip=False,
vertical_flip=False,
):
self.__dict__.update(locals())
self.mean = None
self.std = None
self.principal_components = None
def flow(self, X, y, batch_size=32, shuffle=False, seed=None, save_to_dir=None, save_prefix="", save_format="jpeg"):
if seed:
random.seed(seed)
if shuffle:
seed = random.randint(1, 10e6)
np.random.seed(seed)
np.random.shuffle(X)
np.random.seed(seed)
np.random.shuffle(y)
nb_batch = int(math.ceil(float(X.shape[0])/batch_size))
for b in range(nb_batch):
batch_end = (b+1)*batch_size
if batch_end > X.shape[0]:
nb_samples = X.shape[0] - b*batch_size
else:
nb_samples = batch_size
bX = np.zeros(tuple([nb_samples]+list(X.shape)[1:]))
for i in range(nb_samples):
x = X[b*batch_size+i]
x = self.random_transform(x.astype("float32"))
x = self.standardize(x)
bX[i] = x
if save_to_dir:
for i in range(nb_samples):
img = array_to_img(bX[i], scale=True)
img.save(save_to_dir + "/" + save_prefix + "_" + str(i) + "." + save_format)
yield bX, y[b*batch_size:b*batch_size+nb_samples]
def standardize(self, x):
if self.featurewise_center:
x -= self.mean
if self.featurewise_std_normalization:
x /= self.std
if self.zca_whitening:
flatx = np.reshape(x, (x.shape[0]*x.shape[1]*x.shape[2]))
whitex = np.dot(flatx, self.principal_components)
x = np.reshape(whitex, (x.shape[0], x.shape[1], x.shape[2]))
if self.samplewise_center:
x -= np.mean(x)
if self.samplewise_std_normalization:
x /= np.std(x)
return x
def random_transform(self, x):
if self.rotation_range:
x = random_rotation(x, self.rotation_range)
if self.width_shift_range or self.height_shift_range:
x = random_shift(x, self.width_shift_range, self.height_shift_range)
if self.horizontal_flip:
if random.random() < 0.5:
x = horizontal_flip(x)
if self.vertical_flip:
if random.random() < 0.5:
x = vertical_flip(x)
# TODO:
# zoom
# barrel/fisheye
# shearing
# channel shifting
return x
def fit(self, X,
augment=False, # fit on randomly augmented samples
rounds=1, # if augment, how many augmentation passes over the data do we use
seed=None):
'''
Required for featurewise_center, featurewise_std_normalization and zca_whitening.
'''
X = np.copy(X)
if augment:
aX = np.zeros(tuple([rounds*X.shape[0]]+list(X.shape)[1:]))
for r in range(rounds):
for i in range(X.shape[0]):
                    # random_transform() operates on arrays (it calls the
                    # ndimage helpers above), so transform X[i] directly
                    aX[i + r * X.shape[0]] = self.random_transform(X[i])
X = aX
if self.featurewise_center:
self.mean = np.mean(X, axis=0)
X -= self.mean
if self.featurewise_std_normalization:
self.std = np.std(X, axis=0)
X /= self.std
if self.zca_whitening:
flatX = np.reshape(X, (X.shape[0], X.shape[1]*X.shape[2]*X.shape[3]))
fudge = 10e-6
sigma = np.dot(flatX.T, flatX) / flatX.shape[1]
U, S, V = linalg.svd(sigma)
self.principal_components = np.dot(np.dot(U, np.diag(1. / np.sqrt(S + fudge))), U.T)
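# Illustrative usage (added sketch; 'X_train', 'y_train' and 'model' are
# hypothetical names, not part of this module):
#   datagen = ImageDataGenerator(rotation_range=20., horizontal_flip=True)
#   datagen.fit(X_train)                    # X_train: (N, channels, h, w)
#   for bX, bY in datagen.flow(X_train, y_train, batch_size=32):
#       model.train_on_batch(bX, bY)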
| gpl-2.0 | 4,823,282,614,734,855,000 | 30.235772 | 120 | 0.560776 | false |
thomas-pegot/gpumeanshift | test.py | 1 | 1067 |
from gpumeanshift import filter
import unittest
import numpy as np
import cv2
from cv2.cv import PyrMeanShiftFiltering
class TestGPUMeanShiftFilter(unittest.TestCase):
def test_emptyNDArray(self):
empty_array = np.array([], dtype = np.uint8)
self.assertRaises(TypeError, filter, *[empty_array, 12, 12])
def test_StringInput(self):
self.assertRaises(TypeError, filter, *['string_input', 12, 12])
def test_2dimsNDarray(self):
self.assertRaises(TypeError, filter, *[np.ones((5,5), dtype = np.uint8), 12, 12])
def test_5dimsNDarray(self):
self.assertRaises(TypeError, filter, *[np.ones((5,5,5,5,5), dtype = np.uint8), 12, 12])
def test_MeanShift(self):
self.path = "data/star.jpg"
img = cv2.imread(self.path, cv2.IMREAD_COLOR)
img_cpu = cv2.cv.LoadImage(self.path)
out_cpu = cv2.cv.CloneImage(img_cpu)
PyrMeanShiftFiltering(img_cpu, out_cpu, 12, 12)
out_array_cpu = np.asarray(out_cpu[:,:])
out_gpu = filter(img, 12, 12)[:,:,0:3]
self.assertAlmostEqual( out_gpu.all(), out_array_cpu.all())
if( __name__ == "__main__"):
unittest.main()
| unlicense | -6,187,523,808,740,400,000 | 34.566667 | 89 | 0.700094 | false |
crosenth/csvpandas | csvpandas/subcommands/sample.py | 1 | 2063 | # This file is part of csvpandas
#
# csvpandas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# csvpandas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with csvpandas. If not, see <http://www.gnu.org/licenses/>.
"""Randomly sample rows of a csv file
"""
import logging
import time
from csvpandas import utils
log = logging.getLogger(__name__)
def build_parser(parser):
# required inputs
parser.add_argument(
'n',
type=float,
help='number of rows to sample. Can be a decimal fraction.')
parser.add_argument(
'--seed-in',
type=utils.opener('r'),
help=('file containing integer to generate random seed'))
parser.add_argument(
'--seed-out',
type=utils.opener('w'),
help=('file containing integer used to generate seed'))
parser.add_argument(
'--rest',
help='file to output rows not included in sample.')
parser.add_argument(
'--replace',
action='store_true',
help=('Sample with or without replacement.'))
def action(args):
if args.seed_in:
seed = int(args.seed_in.read().strip())
else:
seed = int(time.time())
df = args.csv
if args.n < 1:
sample = df.sample(
frac=args.n, replace=args.replace, random_state=seed)
else:
sample = df.sample(
n=int(args.n), replace=args.replace, random_state=seed)
sample.to_csv(args.out)
if args.rest:
df[~df.index.isin(sample.index)].to_csv(args.rest)
if args.seed_out:
args.seed_out.write(str(seed))
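# Illustrative invocation (added comment; the parent 'csvpandas' entry
# point supplies the csv input/output arguments, so the exact CLI shape
# below is an assumption):
#   csvpandas sample 0.1 --seed-out seed.txt --rest rest.csv
# draws a 10% sample, writes the unsampled rows to rest.csv and records
# the seed that was used.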
| gpl-3.0 | -6,485,512,836,406,878,000 | 27.260274 | 73 | 0.636452 | false |
arpitprogressive/arpittest | pursuite/settings/staging.py | 1 | 2573 | # -*- coding: utf-8 -*-
"""
Setting for production env
:copyright: (c) 2013 by Openlabs Technologies & Consulting (P) Limited
:license: see LICENSE for more details.
"""
#Flake8: noqa
from common import *
STATIC_ROOT = '/opt/pursuite/www/static'
MEDIA_ROOT = '/opt/pursuite/www/media'
ALLOWED_HOSTS = ['pursuite.openlabs.us']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'pursuite',
'USER': 'mysqluser',
'PASSWORD': 'mysqlpassword',
'HOST': 'pursuite.c6ga5pe5mdoq.ap-southeast-1.rds.amazonaws.com',
'PORT': '3306',
}
}
# Email Settings
EMAIL_USE_TLS = False
EMAIL_HOST = 'mailtrap.io'
EMAIL_PORT = 2525
EMAIL_HOST_USER = 'nasscom-5ae7880ac967ae5d'
EMAIL_HOST_PASSWORD = 'eb5073db7bdb7af1'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# Raven configuration
# Set your DSN value
RAVEN_CONFIG = {
'dsn': 'http://e542381309e640bebb79ae26123e52e5:' + \
'[email protected]/22',
}
# Add amazon s3 as a storage mechanism
INSTALLED_APPS += ('storages', 's3_folder_storage',)
DEFAULT_FILE_STORAGE = 's3_folder_storage.s3.DefaultStorage'
DEFAULT_S3_PATH = "media"
#STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = "AKIAIBGU6ZPMYAHTFOWQ"
AWS_SECRET_ACCESS_KEY = "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"
AWS_STORAGE_BUCKET_NAME = "pursuite"
AWS_QUERYSTRING_AUTH = False
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = '//s3.amazonaws.com/%s/media/' % AWS_STORAGE_BUCKET_NAME
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
# Setup caching
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
'LOCATION': '127.0.0.1:11211',
}
}
| bsd-3-clause | -8,043,136,736,176,889,000 | 28.238636 | 74 | 0.643218 | false |
wkarmistead/InvestmentAnalysisDashboard | pyBackend/businessLayer/ValueScoreObject.py | 1 | 1740 | '''
Created on Apr 5, 2014
@author: Walker Armistead
'''
class ValueScoreObject(object):
_intrinsicValueToSharePrice = 0
_pe_ratio = 0
_peg = 0
_currentPrice = 0
_debtToEquityRatio = 0
_currentAssets = 0
_currentLiabilities = 0
_dividendYield = 0
_earningsGrowth = 0
def setIntrinsicValueToSharePrice(self):
# TODO
self._intrinsicValueToSharePrice = 1
def getIntrinsicValueToSharePrice(self):
return self._intrinsicValueToSharePrice
def setPEratio(self):
# TODO
self._pe_ratio = 1
def getPEratio(self):
return self._pe_ratio
def setPEG(self):
# TODO
self._peg = 1
def getPEG(self):
return self._peg
def setCurrentPrice(self):
# TODO
        self._currentPrice = 1
def getCurrentPrice(self):
return self._currentPrice
def setDebtToEquityRatio(self):
# TODO
self._debtToEquityRatio = 1
def getDebtToEquityRatio(self):
return self._debtToEquityRatio
def setCurrentAssets(self):
# TODO
self._currentAssets = 1
def getCurrentAssets(self):
return self._currentAssets
def setCurrentLiabilities(self):
# TODO
self._currentLiabilities = 1
def getCurrentLiabilities(self):
return self._currentLiabilities
def setDividendYield(self):
# TODO
self._dividendYield = 1
def getDividendYield(self):
return self._dividendYield
def setEarningsGrowth(self):
# TODO
self._earningsGrowth = 1
def getEarningsGrowth(self):
return self._earningsGrowth
| gpl-2.0 | -3,799,703,472,309,803,000 | 20.493827 | 47 | 0.597126 | false |
Frodox/buildbot | master/buildbot/test/unit/test_steps_shell.py | 1 | 41551 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import re
import textwrap
from twisted.trial import unittest
from buildbot import config
from buildbot.process import properties
from buildbot.process import remotetransfer
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.steps import shell
from buildbot.test.fake.remotecommand import Expect
from buildbot.test.fake.remotecommand import ExpectRemoteRef
from buildbot.test.fake.remotecommand import ExpectShell
from buildbot.test.util import config as configmixin
from buildbot.test.util import steps
class TestShellCommandExecution(steps.BuildStepMixin, unittest.TestCase, configmixin.ConfigErrorsMixin):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def assertLegacySummary(self, step, running, done=None):
done = done or running
self.assertEqual(
(step._getLegacySummary(done=False),
step._getLegacySummary(done=True)),
(running, done))
def test_doStepIf_False(self):
self.setupStep(
shell.ShellCommand(command="echo hello", doStepIf=False))
self.expectOutcome(result=SKIPPED,
state_string=u"'echo hello' (skipped)")
return self.runStep()
def test_constructor_args_kwargs(self):
# this is an ugly way to define an API, but for now check that
# the RemoteCommand arguments are properly passed on
step = shell.ShellCommand(workdir='build', command="echo hello",
want_stdout=0, logEnviron=False)
self.assertEqual(step.remote_kwargs, dict(want_stdout=0,
logEnviron=False,
workdir='build',
usePTY=None))
def test_constructor_args_validity(self):
# this checks that an exception is raised for invalid arguments
self.assertRaisesConfigError(
"Invalid argument(s) passed to RemoteShellCommand: ",
lambda: shell.ShellCommand(workdir='build', command="echo Hello World",
wrongArg1=1, wrongArg2='two'))
def test_getLegacySummary_from_empty_command(self):
# this is more of a regression test for a potential failure, really
step = shell.ShellCommand(workdir='build', command=' ')
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_from_short_command(self):
step = shell.ShellCommand(workdir='build', command="true")
step.rendered = True
self.assertLegacySummary(step, u"'true'")
def test_getLegacySummary_from_short_command_list(self):
step = shell.ShellCommand(workdir='build', command=["true"])
step.rendered = True
self.assertLegacySummary(step, "'true'")
def test_getLegacySummary_from_med_command(self):
step = shell.ShellCommand(command="echo hello")
step.rendered = True
self.assertLegacySummary(step, u"'echo hello'")
def test_getLegacySummary_from_med_command_list(self):
step = shell.ShellCommand(command=["echo", "hello"])
step.rendered = True
self.assertLegacySummary(step, u"'echo hello'")
def test_getLegacySummary_from_long_command(self):
step = shell.ShellCommand(command="this is a long command")
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_long_command_list(self):
step = shell.ShellCommand(command="this is a long command".split())
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_list(self):
step = shell.ShellCommand(command=["this", ["is", "a"], "nested"])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_tuples(self):
step = shell.ShellCommand(command=["this", ("is", "a"), "nested"])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_list_empty(self):
step = shell.ShellCommand(command=["this", [], ["is", "a"], "nested"])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_from_nested_command_list_deep(self):
step = shell.ShellCommand(command=[["this", [[["is", ["a"]]]]]])
step.rendered = True
self.assertLegacySummary(step, u"'this is ...'")
def test_getLegacySummary_custom(self):
step = shell.ShellCommand(command="echo hello",
description=["echoing"],
descriptionDone=["echoed"])
step.rendered = True
self.assertLegacySummary(step, None) # handled by parent class
def test_getLegacySummary_with_suffix(self):
step = shell.ShellCommand(
command="echo hello", descriptionSuffix="suffix")
step.rendered = True
self.assertLegacySummary(step, u"'echo hello' suffix")
def test_getLegacySummary_unrendered_WithProperties(self):
step = shell.ShellCommand(command=properties.WithProperties(''))
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_unrendered_custom_new_style_class_renderable(self):
step = shell.ShellCommand(command=object())
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_unrendered_custom_old_style_class_renderable(self):
class C:
pass
step = shell.ShellCommand(command=C())
step.rendered = True
self.assertLegacySummary(step, None)
def test_getLegacySummary_unrendered_WithProperties_list(self):
step = shell.ShellCommand(
command=['x', properties.WithProperties(''), 'y'])
step.rendered = True
self.assertLegacySummary(step, "'x y'")
def test_run_simple(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello"))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello')
+ 0
)
self.expectOutcome(result=SUCCESS, state_string="'echo hello'")
return self.runStep()
def test_run_list(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_nested_description(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=properties.FlattenList(
['trial', ['-b', '-B'], 'buildbot.test']),
descriptionDone=properties.FlattenList(
['test', ['done']]),
descriptionSuffix=properties.FlattenList(['suff', ['ix']])))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string='test done suff ix')
return self.runStep()
def test_run_nested_command(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=['trial', ['-b', '-B'], 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_nested_deeply_command(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=[['trial', ['-b', ['-B']]], 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', '-B', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_nested_empty_command(self):
self.setupStep(
shell.ShellCommand(workdir='build',
command=['trial', [], '-b', [], 'buildbot.test']))
self.expectCommands(
ExpectShell(workdir='build',
command=['trial', '-b', 'buildbot.test'])
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="'trial -b ...'")
return self.runStep()
def test_run_env(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello"),
worker_env=dict(DEF='HERE'))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello',
env=dict(DEF='HERE'))
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_env_override(self):
self.setupStep(
shell.ShellCommand(workdir='build', env={'ABC': '123'},
command="echo hello"),
worker_env=dict(ABC='XXX', DEF='HERE'))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello',
env=dict(ABC='123', DEF='HERE'))
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_usePTY(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello",
usePTY=False))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello',
usePTY=False)
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_usePTY_old_worker(self):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello",
usePTY=True),
worker_version=dict(shell='1.1'))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello')
+ 0
)
self.expectOutcome(result=SUCCESS)
return self.runStep()
def test_run_decodeRC(self, rc=1, results=WARNINGS, extra_text=" (warnings)"):
self.setupStep(
shell.ShellCommand(workdir='build', command="echo hello",
decodeRC={1: WARNINGS}))
self.expectCommands(
ExpectShell(workdir='build', command='echo hello')
+ rc
)
self.expectOutcome(
result=results, state_string="'echo hello'" + extra_text)
return self.runStep()
def test_run_decodeRC_defaults(self):
return self.test_run_decodeRC(2, FAILURE, extra_text=" (failure)")
def test_run_decodeRC_defaults_0_is_failure(self):
return self.test_run_decodeRC(0, FAILURE, extra_text=" (failure)")
def test_missing_command_error(self):
# this checks that an exception is raised for invalid arguments
self.assertRaisesConfigError(
"ShellCommand's `command' argument is not specified",
lambda: shell.ShellCommand())
class TreeSize(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_run_success(self):
self.setupStep(shell.TreeSize())
self.expectCommands(
ExpectShell(workdir='wkdir',
command=['du', '-s', '-k', '.'])
+ ExpectShell.log('stdio', stdout='9292 .\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="treesize 9292 KiB")
self.expectProperty('tree-size-KiB', 9292)
return self.runStep()
def test_run_misparsed(self):
self.setupStep(shell.TreeSize())
self.expectCommands(
ExpectShell(workdir='wkdir',
command=['du', '-s', '-k', '.'])
            + ExpectShell.log('stdio', stdout='abcdef\n')
+ 0
)
self.expectOutcome(result=WARNINGS,
state_string="treesize unknown (warnings)")
return self.runStep()
def test_run_failed(self):
self.setupStep(shell.TreeSize())
self.expectCommands(
ExpectShell(workdir='wkdir',
command=['du', '-s', '-k', '.'])
+ ExpectShell.log('stdio', stderr='abcdef\n')
+ 1
)
self.expectOutcome(result=FAILURE,
state_string="treesize unknown (failure)")
return self.runStep()
class SetPropertyFromCommand(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_constructor_conflict(self):
self.assertRaises(config.ConfigErrors, lambda:
shell.SetPropertyFromCommand(property='foo', extract_fn=lambda: None))
def test_run_property(self):
self.setupStep(
shell.SetPropertyFromCommand(property="res", command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='\n\nabcdef\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="property 'res' set")
self.expectProperty("res", "abcdef") # note: stripped
self.expectLogfile('property changes', r"res: " + repr(u'abcdef'))
return self.runStep()
def test_renderable_workdir(self):
self.setupStep(
shell.SetPropertyFromCommand(property="res", command="cmd", workdir=properties.Interpolate('wkdir')))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='\n\nabcdef\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="property 'res' set")
self.expectProperty("res", "abcdef") # note: stripped
self.expectLogfile('property changes', r"res: " + repr(u'abcdef'))
return self.runStep()
def test_run_property_no_strip(self):
self.setupStep(shell.SetPropertyFromCommand(property="res", command="cmd",
strip=False))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='\n\nabcdef\n')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="property 'res' set")
self.expectProperty("res", "\n\nabcdef\n")
self.expectLogfile('property changes', r"res: " + repr(u'\n\nabcdef\n'))
return self.runStep()
def test_run_failure(self):
self.setupStep(
shell.SetPropertyFromCommand(property="res", command="blarg"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="blarg")
+ ExpectShell.log('stdio', stderr='cannot blarg: File not found')
+ 1
)
self.expectOutcome(result=FAILURE,
state_string="'blarg' (failure)")
self.expectNoProperty("res")
return self.runStep()
def test_run_extract_fn(self):
def extract_fn(rc, stdout, stderr):
self.assertEqual(
(rc, stdout, stderr), (0, 'startend\n', 'STARTEND\n'))
return dict(a=1, b=2)
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout='start', stderr='START')
+ ExpectShell.log('stdio', stdout='end')
+ ExpectShell.log('stdio', stderr='END')
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string="2 properties set")
self.expectLogfile('property changes', 'a: 1\nb: 2')
self.expectProperty("a", 1)
self.expectProperty("b", 2)
return self.runStep()
def test_run_extract_fn_cmdfail(self):
def extract_fn(rc, stdout, stderr):
self.assertEqual((rc, stdout, stderr), (3, '', ''))
return dict(a=1, b=2)
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ 3
)
# note that extract_fn *is* called anyway
self.expectOutcome(result=FAILURE,
state_string="2 properties set (failure)")
self.expectLogfile('property changes', 'a: 1\nb: 2')
return self.runStep()
def test_run_extract_fn_cmdfail_empty(self):
def extract_fn(rc, stdout, stderr):
self.assertEqual((rc, stdout, stderr), (3, '', ''))
return dict()
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ 3
)
# note that extract_fn *is* called anyway, but returns no properties
self.expectOutcome(result=FAILURE,
state_string="'cmd' (failure)")
return self.runStep()
def test_run_extract_fn_exception(self):
def extract_fn(rc, stdout, stderr):
raise RuntimeError("oh noes")
self.setupStep(
shell.SetPropertyFromCommand(extract_fn=extract_fn, command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ 0
)
# note that extract_fn *is* called anyway, but returns no properties
self.expectOutcome(result=EXCEPTION,
state_string="'cmd' (exception)")
d = self.runStep()
d.addCallback(lambda _:
self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1))
return d
def test_error_both_set(self):
"""
If both ``extract_fn`` and ``property`` are defined,
``SetPropertyFromCommand`` reports a config error.
"""
self.assertRaises(config.ConfigErrors,
shell.SetPropertyFromCommand, command=["echo", "value"], property="propname", extract_fn=lambda x: {"propname": "hello"})
def test_error_none_set(self):
"""
If neither ``extract_fn`` and ``property`` are defined,
``SetPropertyFromCommand`` reports a config error.
"""
self.assertRaises(config.ConfigErrors,
shell.SetPropertyFromCommand, command=["echo", "value"])
class PerlModuleTest(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_new_version_success(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
Test Summary Report
Result: PASS
Tests: 10 Failed: 0
Tests: 10 Failed: 0
Files=93, Tests=20"""))
+ 0
)
self.expectOutcome(result=SUCCESS, state_string='20 tests 20 passed')
return self.runStep()
def test_new_version_warnings(self):
self.setupStep(shell.PerlModuleTest(command="cmd",
warningPattern='^OHNOES'))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
Test Summary Report
-------------------
foo.pl (Wstat: 0 Tests: 10 Failed: 0)
Failed test: 0
OHNOES 1
OHNOES 2
Files=93, Tests=20, 0 wallclock secs ...
Result: PASS"""))
+ 0
)
self.expectOutcome(
result=WARNINGS,
state_string='20 tests 20 passed 2 warnings (warnings)')
return self.runStep()
def test_new_version_failed(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
foo.pl .. 1/4"""))
+ ExpectShell.log('stdio', stderr=textwrap.dedent("""\
# Failed test 2 in foo.pl at line 6
# foo.pl line 6 is: ok(0);"""))
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
foo.pl .. Failed 1/4 subtests
Test Summary Report
-------------------
foo.pl (Wstat: 0 Tests: 4 Failed: 1)
Failed test: 0
Files=1, Tests=4, 0 wallclock secs ( 0.06 usr 0.01 sys + 0.03 cusr 0.01 csys = 0.11 CPU)
Result: FAIL"""))
+ ExpectShell.log('stdio', stderr=textwrap.dedent("""\
Failed 1/1 test programs. 1/4 subtests failed."""))
+ 1
)
self.expectOutcome(result=FAILURE,
state_string='4 tests 3 passed 1 failed (failure)')
return self.runStep()
def test_old_version_success(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
All tests successful
Files=10, Tests=20, 100 wall blah blah"""))
+ 0
)
self.expectOutcome(result=SUCCESS,
state_string='20 tests 20 passed')
return self.runStep()
def test_old_version_failed(self):
self.setupStep(shell.PerlModuleTest(command="cmd"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command="cmd")
+ ExpectShell.log('stdio', stdout=textwrap.dedent("""\
This junk ignored
Failed 1/1 test programs, 3/20 subtests failed."""))
+ 1
)
self.expectOutcome(result=FAILURE,
state_string='20 tests 17 passed 3 failed (failure)')
return self.runStep()
class SetPropertyDeprecation(unittest.TestCase):
"""
Tests for L{shell.SetProperty}
"""
def test_deprecated(self):
"""
Accessing L{shell.SetProperty} reports a deprecation error.
"""
shell.SetProperty
warnings = self.flushWarnings([self.test_deprecated])
self.assertEqual(len(warnings), 1)
self.assertIdentical(warnings[0]['category'], DeprecationWarning)
self.assertEqual(warnings[0]['message'],
"buildbot.steps.shell.SetProperty was deprecated in Buildbot 0.8.8: "
"It has been renamed to SetPropertyFromCommand"
)
class Configure(unittest.TestCase):
def test_class_attrs(self):
# nothing too exciting here, but at least make sure the class is
# present
step = shell.Configure()
self.assertEqual(step.command, ['./configure'])
class WarningCountingShellCommand(steps.BuildStepMixin, unittest.TestCase,
configmixin.ConfigErrorsMixin):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_no_warnings(self):
self.setupStep(shell.WarningCountingShellCommand(workdir='w',
command=['make']))
self.expectCommands(
ExpectShell(workdir='w',
command=["make"])
+ ExpectShell.log('stdio', stdout='blarg success!')
+ 0
)
self.expectOutcome(result=SUCCESS)
self.expectProperty("warnings-count", 0)
return self.runStep()
def test_default_pattern(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make']))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio',
stdout='normal: foo\nwarning: blarg!\n'
'also normal\nWARNING: blarg!\n')
+ 0
)
self.expectOutcome(result=WARNINGS)
self.expectProperty("warnings-count", 2)
self.expectLogfile("warnings (2)",
"warning: blarg!\nWARNING: blarg!\n")
return self.runStep()
def test_custom_pattern(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make'],
warningPattern=r"scary:.*"))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio',
stdout='scary: foo\nwarning: bar\nscary: bar')
+ 0
)
self.expectOutcome(result=WARNINGS)
self.expectProperty("warnings-count", 2)
self.expectLogfile("warnings (2)", "scary: foo\nscary: bar\n")
return self.runStep()
def test_maxWarnCount(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make'],
maxWarnCount=9))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout='warning: noo!\n' * 10)
+ 0
)
self.expectOutcome(result=FAILURE)
self.expectProperty("warnings-count", 10)
return self.runStep()
def test_fail_with_warnings(self):
self.setupStep(shell.WarningCountingShellCommand(command=['make']))
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout='warning: I might fail')
+ 3
)
self.expectOutcome(result=FAILURE)
self.expectProperty("warnings-count", 1)
self.expectLogfile("warnings (1)", "warning: I might fail\n")
return self.runStep()
def do_test_suppressions(self, step, supps_file='', stdout='',
exp_warning_count=0, exp_warning_log='',
exp_exception=False):
self.setupStep(step)
# Invoke the expected callbacks for the suppression file upload. Note
# that this assumes all of the remote_* are synchronous, but can be
# easily adapted to suit if that changes (using inlineCallbacks)
def upload_behavior(command):
writer = command.args['writer']
writer.remote_write(supps_file)
writer.remote_close()
command.rc = 0
if supps_file is not None:
self.expectCommands(
# step will first get the remote suppressions file
Expect('uploadFile', dict(blocksize=32768, maxsize=None,
workersrc='supps', workdir='wkdir',
writer=ExpectRemoteRef(remotetransfer.StringFileWriter)))
+ Expect.behavior(upload_behavior),
# and then run the command
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout=stdout)
+ 0
)
else:
self.expectCommands(
ExpectShell(workdir='wkdir',
command=["make"])
+ ExpectShell.log('stdio', stdout=stdout)
+ 0
)
if exp_exception:
self.expectOutcome(result=EXCEPTION,
state_string="'make' (exception)")
else:
if exp_warning_count != 0:
self.expectOutcome(result=WARNINGS,
state_string="'make' (warnings)")
self.expectLogfile("warnings (%d)" % exp_warning_count,
exp_warning_log)
else:
self.expectOutcome(result=SUCCESS,
state_string="'make'")
self.expectProperty("warnings-count", exp_warning_count)
return self.runStep()
def test_suppressions(self):
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps')
supps_file = textwrap.dedent("""\
# example suppressions file
amar.c : .*unused variable.*
holding.c : .*invalid access to non-static.*
""").strip()
stdout = textwrap.dedent("""\
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
amar.c: In function 'write_record':
amar.c:164: warning: unused variable 'x'
amar.c:164: warning: this should show up
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
/bin/sh ../libtool --tag=CC --silent --mode=link gcc blah
holding.c: In function 'holding_thing':
holding.c:984: warning: invalid access to non-static 'y'
""")
exp_warning_log = textwrap.dedent("""\
amar.c:164: warning: this should show up
""")
return self.do_test_suppressions(step, supps_file, stdout, 1,
exp_warning_log)
def test_suppressions_directories(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor)
supps_file = textwrap.dedent("""\
# these should be suppressed:
amar-src/amar.c : XXX
.*/server-src/.* : AAA
# these should not, as the dirs do not match:
amar.c : YYY
server-src.* : BBB
""").strip()
# note that this uses the unicode smart-quotes that gcc loves so much
stdout = textwrap.dedent(u"""\
make: Entering directory \u2019amar-src\u2019
amar.c:164: warning: XXX
amar.c:165: warning: YYY
make: Leaving directory 'amar-src'
make: Entering directory "subdir"
make: Entering directory 'server-src'
make: Entering directory `one-more-dir`
holding.c:999: warning: BBB
holding.c:1000: warning: AAA
""")
exp_warning_log = textwrap.dedent("""\
amar.c:165: warning: YYY
holding.c:999: warning: BBB
""")
return self.do_test_suppressions(step, supps_file, stdout, 2,
exp_warning_log)
def test_suppressions_directories_custom(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor,
directoryEnterPattern="^IN: (.*)",
directoryLeavePattern="^OUT:")
supps_file = "dir1/dir2/abc.c : .*"
stdout = textwrap.dedent(u"""\
IN: dir1
IN: decoy
OUT: decoy
IN: dir2
abc.c:123: warning: hello
""")
return self.do_test_suppressions(step, supps_file, stdout, 0, '')
def test_suppressions_linenos(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor)
supps_file = "abc.c:.*:100-199\ndef.c:.*:22"
stdout = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:150: warning: unseen
def.c:22: warning: unseen
abc.c:200: warning: seen 2
""")
exp_warning_log = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:200: warning: seen 2
""")
return self.do_test_suppressions(step, supps_file, stdout, 2,
exp_warning_log)
def test_suppressions_warningExtractor_exc(self):
def warningExtractor(step, line, match):
raise RuntimeError("oh noes")
step = shell.WarningCountingShellCommand(command=['make'],
suppressionFile='supps',
warningExtractor=warningExtractor)
# need at least one supp to trigger warningExtractor
supps_file = 'x:y'
stdout = "abc.c:99: warning: seen 1"
d = self.do_test_suppressions(step, supps_file, stdout,
exp_exception=True)
d.addCallback(lambda _:
self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1))
return d
def test_suppressions_addSuppression(self):
# call addSuppression "manually" from a subclass
class MyWCSC(shell.WarningCountingShellCommand):
def start(self):
self.addSuppression([('.*', '.*unseen.*', None, None)])
return shell.WarningCountingShellCommand.start(self)
def warningExtractor(step, line, match):
return line.split(':', 2)
step = MyWCSC(command=['make'], suppressionFile='supps',
warningExtractor=warningExtractor)
stdout = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:150: warning: unseen
abc.c:200: warning: seen 2
""")
exp_warning_log = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:200: warning: seen 2
""")
return self.do_test_suppressions(step, '', stdout, 2,
exp_warning_log)
def test_suppressions_suppressionsParameter(self):
def warningExtractor(step, line, match):
return line.split(':', 2)
supps = (
("abc.c", ".*", 100, 199),
("def.c", ".*", 22, 22),
)
step = shell.WarningCountingShellCommand(command=['make'],
suppressionList=supps,
warningExtractor=warningExtractor)
stdout = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:150: warning: unseen
def.c:22: warning: unseen
abc.c:200: warning: seen 2
""")
exp_warning_log = textwrap.dedent(u"""\
abc.c:99: warning: seen 1
abc.c:200: warning: seen 2
""")
return self.do_test_suppressions(step, None, stdout, 2,
exp_warning_log)
def test_warnExtractFromRegexpGroups(self):
step = shell.WarningCountingShellCommand(command=['make'])
we = shell.WarningCountingShellCommand.warnExtractFromRegexpGroups
line, pat, exp_file, exp_lineNo, exp_text = \
('foo:123:text', '(.*):(.*):(.*)', 'foo', 123, 'text')
self.assertEqual(we(step, line, re.match(pat, line)),
(exp_file, exp_lineNo, exp_text))
def test_missing_command_error(self):
# this checks that an exception is raised for invalid arguments
self.assertRaisesConfigError(
"WarningCountingShellCommand's `command' argument is not "
"specified",
lambda: shell.WarningCountingShellCommand())
class Compile(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_class_args(self):
# since this step is just a pre-configured WarningCountingShellCommand,
# there' not much to test!
step = self.setupStep(shell.Compile())
self.assertEqual(step.name, "compile")
self.assertTrue(step.haltOnFailure)
self.assertTrue(step.flunkOnFailure)
self.assertEqual(step.description, ["compiling"])
self.assertEqual(step.descriptionDone, ["compile"])
self.assertEqual(step.command, ["make", "all"])
class Test(steps.BuildStepMixin, unittest.TestCase):
def setUp(self):
self.setUpBuildStep()
def tearDown(self):
self.tearDownBuildStep()
def test_setTestResults(self):
step = self.setupStep(shell.Test())
step.setTestResults(total=10, failed=3, passed=5, warnings=3)
self.assertEqual(step.statistics, {
'tests-total': 10,
'tests-failed': 3,
'tests-passed': 5,
'tests-warnings': 3,
})
# ensure that they're additive
step.setTestResults(total=1, failed=2, passed=3, warnings=4)
self.assertEqual(step.statistics, {
'tests-total': 11,
'tests-failed': 5,
'tests-passed': 8,
'tests-warnings': 7,
})
def test_describe_not_done(self):
step = self.setupStep(shell.Test())
step.rendered = True
self.assertEqual(step.describe(), None)
def test_describe_done(self):
step = self.setupStep(shell.Test())
step.rendered = True
step.statistics['tests-total'] = 93
step.statistics['tests-failed'] = 10
step.statistics['tests-passed'] = 20
step.statistics['tests-warnings'] = 30
self.assertEqual(step.describe(done=True),
['93 tests', '20 passed', '30 warnings', '10 failed'])
def test_describe_done_no_total(self):
step = self.setupStep(shell.Test())
step.rendered = True
step.statistics['tests-total'] = 0
step.statistics['tests-failed'] = 10
step.statistics['tests-passed'] = 20
step.statistics['tests-warnings'] = 30
# describe calculates 60 = 10+20+30
self.assertEqual(step.describe(done=True),
['60 tests', '20 passed', '30 warnings', '10 failed'])
| gpl-2.0 | 6,412,873,024,448,098,000 | 38.952885 | 147 | 0.550047 | false |
jptomo/rpython-lang-scheme | rpython/jit/backend/x86/test/test_runner.py | 1 | 25702 | import py
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, rstr
from rpython.jit.metainterp.history import ResOperation, TargetToken,\
JitCellToken
from rpython.jit.metainterp.history import (ConstInt, ConstPtr, Const,
BasicFailDescr, BasicFinalDescr)
from rpython.jit.backend.detect_cpu import getcpuclass
from rpython.jit.backend.x86.arch import WORD
from rpython.jit.backend.x86.rx86 import fits_in_32bits
from rpython.jit.backend.llsupport import symbolic
from rpython.jit.metainterp.resoperation import rop, InputArgInt, InputArgRef
from rpython.jit.metainterp.executor import execute
from rpython.jit.backend.test.runner_test import LLtypeBackendTest
from rpython.jit.tool.oparser import parse
import ctypes
CPU = getcpuclass()
class FakeStats(object):
pass
U = LLtypeBackendTest.U
S = LLtypeBackendTest.S
# ____________________________________________________________
class TestX86(LLtypeBackendTest):
# for the individual tests see
# ====> ../../test/runner_test.py
if WORD == 4:
add_loop_instructions = ('mov; '
'lea; ' # a nop, for the label
'add; test; je; jmp;') # plus some padding
bridge_loop_instructions = 'cmp; jge; mov; mov; call; jmp;'
else:
add_loop_instructions = ('mov; '
'nop; ' # for the label
'add; test; je; jmp;') # plus some padding
bridge_loop_instructions = (
'cmp; jge; mov;( movabs;)? mov; mov(abs)?; call; mov(abs)?; jmp;')
def get_cpu(self):
cpu = CPU(rtyper=None, stats=FakeStats())
cpu.setup_once()
return cpu
def test_execute_ptr_operation(self):
cpu = self.cpu
u_box, _, _ = self.alloc_instance(U)
u = u_box.getref(lltype.Ptr(U))
ofs = cpu.fielddescrof(S, 'value')
assert self.execute_operation(rop.SETFIELD_GC,
[u_box, InputArgInt(3)],
'void', ofs) == None
assert u.parent.parent.value == 3
u.parent.parent.value += 100
assert (self.execute_operation(rop.GETFIELD_GC_I, [u_box], 'int', ofs)
== 103)
def test_unicode(self):
ofs = symbolic.get_field_token(rstr.UNICODE, 'chars', False)[0]
u = rstr.mallocunicode(13)
for i in range(13):
u.chars[i] = unichr(ord(u'a') + i)
b = InputArgRef(lltype.cast_opaque_ptr(llmemory.GCREF, u))
r = self.execute_operation(rop.UNICODEGETITEM, [b, ConstInt(2)], 'int')
assert r == ord(u'a') + 2
self.execute_operation(rop.UNICODESETITEM, [b, ConstInt(2),
ConstInt(ord(u'z'))],
'void')
assert u.chars[2] == u'z'
assert u.chars[3] == u'd'
@staticmethod
def _resbuf(res, item_tp=ctypes.c_long):
return ctypes.cast(res._obj.intval, ctypes.POINTER(item_tp))
def test_allocations(self):
py.test.skip("rewrite or kill")
from rpython.rtyper.lltypesystem import rstr
allocs = [None]
all = []
orig_new = self.cpu.gc_ll_descr.funcptr_for_new
def f(size):
allocs.insert(0, size)
return orig_new(size)
self.cpu.assembler.setup_once()
self.cpu.gc_ll_descr.funcptr_for_new = f
ofs = symbolic.get_field_token(rstr.STR, 'chars', False)[0]
res = self.execute_operation(rop.NEWSTR, [ConstInt(7)], 'ref')
assert allocs[0] == 7 + ofs + WORD
resbuf = self._resbuf(res)
assert resbuf[ofs/WORD] == 7
# ------------------------------------------------------------
res = self.execute_operation(rop.NEWSTR, [InputArgInt(7)], 'ref')
assert allocs[0] == 7 + ofs + WORD
resbuf = self._resbuf(res)
assert resbuf[ofs/WORD] == 7
# ------------------------------------------------------------
TP = lltype.GcArray(lltype.Signed)
ofs = symbolic.get_field_token(TP, 'length', False)[0]
descr = self.cpu.arraydescrof(TP)
res = self.execute_operation(rop.NEW_ARRAY, [ConstInt(10)],
'ref', descr)
assert allocs[0] == 10*WORD + ofs + WORD
resbuf = self._resbuf(res)
assert resbuf[ofs/WORD] == 10
# ------------------------------------------------------------
res = self.execute_operation(rop.NEW_ARRAY, [InputArgInt(10)],
'ref', descr)
assert allocs[0] == 10*WORD + ofs + WORD
resbuf = self._resbuf(res)
assert resbuf[ofs/WORD] == 10
def test_stringitems(self):
from rpython.rtyper.lltypesystem.rstr import STR
ofs = symbolic.get_field_token(STR, 'chars', False)[0]
ofs_items = symbolic.get_field_token(STR.chars, 'items', False)[0]
res = self.execute_operation(rop.NEWSTR, [ConstInt(10)], 'ref')
self.execute_operation(rop.STRSETITEM, [InputArgRef(res), ConstInt(2), ConstInt(ord('d'))], 'void')
resbuf = self._resbuf(res, ctypes.c_char)
assert resbuf[ofs + ofs_items + 2] == 'd'
self.execute_operation(rop.STRSETITEM, [InputArgRef(res), InputArgInt(2), ConstInt(ord('z'))], 'void')
assert resbuf[ofs + ofs_items + 2] == 'z'
r = self.execute_operation(rop.STRGETITEM, [InputArgRef(res), InputArgInt(2)], 'int')
assert r == ord('z')
def test_arrayitems(self):
TP = lltype.GcArray(lltype.Signed)
ofs = symbolic.get_field_token(TP, 'length', False)[0]
itemsofs = symbolic.get_field_token(TP, 'items', False)[0]
descr = self.cpu.arraydescrof(TP)
res = self.execute_operation(rop.NEW_ARRAY, [ConstInt(10)],
'ref', descr)
resbuf = self._resbuf(res)
assert resbuf[ofs/WORD] == 10
self.execute_operation(rop.SETARRAYITEM_GC, [InputArgRef(res),
ConstInt(2), InputArgInt(38)],
'void', descr)
assert resbuf[itemsofs/WORD + 2] == 38
self.execute_operation(rop.SETARRAYITEM_GC, [InputArgRef(res),
InputArgInt(3), InputArgInt(42)],
'void', descr)
assert resbuf[itemsofs/WORD + 3] == 42
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [InputArgRef(res), ConstInt(2)],
'int', descr)
assert r == 38
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [ConstPtr(res),
InputArgInt(2)],
'int', descr)
assert r == 38
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [ConstPtr(res),
ConstInt(2)],
'int', descr)
assert r == 38
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [InputArgRef(res),
InputArgInt(2)],
'int', descr)
assert r == 38
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [InputArgRef(res), InputArgInt(3)],
'int', descr)
assert r == 42
def test_arrayitems_not_int(self):
TP = lltype.GcArray(lltype.Char)
ofs = symbolic.get_field_token(TP, 'length', False)[0]
itemsofs = symbolic.get_field_token(TP, 'items', False)[0]
descr = self.cpu.arraydescrof(TP)
res = self.execute_operation(rop.NEW_ARRAY, [ConstInt(10)],
'ref', descr)
resbuf = self._resbuf(res, ctypes.c_char)
res = InputArgRef(res)
assert resbuf[ofs] == chr(10)
for i in range(10):
self.execute_operation(rop.SETARRAYITEM_GC, [res,
ConstInt(i), InputArgInt(i)],
'void', descr)
for i in range(10):
assert resbuf[itemsofs + i] == chr(i)
for i in range(10):
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [res,
ConstInt(i)],
'int', descr)
assert r == i
def test_getfield_setfield(self):
TP = lltype.GcStruct('x', ('s', lltype.Signed),
('i', rffi.INT),
('f', lltype.Float),
('u', rffi.USHORT),
('c1', lltype.Char),
('c2', lltype.Char),
('c3', lltype.Char))
res = InputArgRef(self.execute_operation(rop.NEW, [],
'ref', self.cpu.sizeof(TP)))
ofs_s = self.cpu.fielddescrof(TP, 's')
ofs_i = self.cpu.fielddescrof(TP, 'i')
#ofs_f = self.cpu.fielddescrof(TP, 'f')
ofs_u = self.cpu.fielddescrof(TP, 'u')
ofsc1 = self.cpu.fielddescrof(TP, 'c1')
ofsc2 = self.cpu.fielddescrof(TP, 'c2')
ofsc3 = self.cpu.fielddescrof(TP, 'c3')
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(3)], 'void',
ofs_s)
# XXX ConstFloat
#self.execute_operation(rop.SETFIELD_GC, [res, ofs_f, 1e100], 'void')
# XXX we don't support shorts (at all)
#self.execute_operation(rop.SETFIELD_GC, [res, ofs_u, ConstInt(5)], 'void')
s = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofs_s)
assert s == 3
self.execute_operation(rop.SETFIELD_GC, [res, InputArgInt(3)], 'void',
ofs_s)
s = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofs_s)
assert s == 3
self.execute_operation(rop.SETFIELD_GC, [res, InputArgInt(1234)], 'void', ofs_i)
i = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofs_i)
assert i == 1234
#u = self.execute_operation(rop.GETFIELD_GC, [res, ofs_u], 'int')
#assert u.value == 5
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(1)], 'void',
ofsc1)
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(3)], 'void',
ofsc3)
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(2)], 'void',
ofsc2)
c = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofsc1)
assert c == 1
c = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofsc2)
assert c == 2
c = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofsc3)
assert c == 3
def test_bug_setfield_64bit(self):
if WORD == 4:
py.test.skip("only for 64 bits")
TP = lltype.GcStruct('S', ('i', lltype.Signed))
ofsi = self.cpu.fielddescrof(TP, 'i')
for i in range(500):
p = lltype.malloc(TP)
addr = rffi.cast(lltype.Signed, p)
if fits_in_32bits(addr):
break # fitting in 32 bits, good
else:
py.test.skip("cannot get a 32-bit pointer")
res = ConstPtr(rffi.cast(llmemory.GCREF, addr))
self.execute_operation(rop.SETFIELD_RAW, [res, ConstInt(3**33)],
'void', ofsi)
assert p.i == 3**33
def test_and_mask_common_patterns(self):
cases = [8, 16, 24]
if WORD == 8:
cases.append(32)
for i in cases:
box = InputArgInt(0xAAAAAAAAAAAA)
res = self.execute_operation(rop.INT_AND,
[box, ConstInt(2 ** i - 1)],
'int')
assert res == 0xAAAAAAAAAAAA & (2 ** i - 1)
def test_nullity_with_guard(self):
allops = [rop.INT_IS_TRUE]
guards = [rop.GUARD_TRUE, rop.GUARD_FALSE]
p = lltype.cast_opaque_ptr(llmemory.GCREF,
lltype.malloc(lltype.GcStruct('x')))
nullptr = lltype.nullptr(llmemory.GCREF.TO)
f = InputArgInt()
for op in allops:
for guard in guards:
if op == rop.INT_IS_TRUE:
bp = InputArgInt(1)
n = InputArgInt(0)
else:
bp = InputArgRef(p)
n = InputArgRef(nullptr)
for b in (bp, n):
i1 = ResOperation(rop.SAME_AS_I, [ConstInt(1)])
f = ResOperation(op, [b])
ops = [
i1,
f,
ResOperation(guard, [f],
descr=BasicFailDescr()),
ResOperation(rop.FINISH, [ConstInt(0)],
descr=BasicFinalDescr()),
]
ops[-2].setfailargs([i1])
looptoken = JitCellToken()
self.cpu.compile_loop([b], ops, looptoken)
deadframe = self.cpu.execute_token(looptoken, b.getint())
result = self.cpu.get_int_value(deadframe, 0)
if guard == rop.GUARD_FALSE:
assert result == execute(self.cpu, None,
op, None, b)
else:
assert result != execute(self.cpu, None,
op, None, b)
def test_stuff_followed_by_guard(self):
boxes = [(InputArgInt(1), InputArgInt(0)),
(InputArgInt(0), InputArgInt(1)),
(InputArgInt(1), InputArgInt(1)),
(InputArgInt(-1), InputArgInt(1)),
(InputArgInt(1), InputArgInt(-1)),
(ConstInt(1), InputArgInt(0)),
(ConstInt(0), InputArgInt(1)),
(ConstInt(1), InputArgInt(1)),
(ConstInt(-1), InputArgInt(1)),
(ConstInt(1), InputArgInt(-1)),
(InputArgInt(1), ConstInt(0)),
(InputArgInt(0), ConstInt(1)),
(InputArgInt(1), ConstInt(1)),
(InputArgInt(-1), ConstInt(1)),
(InputArgInt(1), ConstInt(-1))]
guards = [rop.GUARD_FALSE, rop.GUARD_TRUE]
all = [rop.INT_EQ, rop.INT_NE, rop.INT_LE, rop.INT_LT, rop.INT_GT,
rop.INT_GE, rop.UINT_GT, rop.UINT_LT, rop.UINT_LE, rop.UINT_GE]
for a, b in boxes:
for guard in guards:
for op in all:
i1 = ResOperation(rop.SAME_AS_I, [ConstInt(1)])
res = ResOperation(op, [a, b])
ops = [
i1, res,
ResOperation(guard, [res],
descr=BasicFailDescr()),
ResOperation(rop.FINISH, [ConstInt(0)],
descr=BasicFinalDescr()),
]
ops[-2].setfailargs([i1])
inputargs = [i for i in (a, b) if not isinstance(i, Const)]
looptoken = JitCellToken()
self.cpu.compile_loop(inputargs, ops, looptoken)
inputvalues = [box.getint() for box in inputargs]
deadframe = self.cpu.execute_token(looptoken, *inputvalues)
result = self.cpu.get_int_value(deadframe, 0)
expected = execute(self.cpu, None, op, None, a, b)
if guard == rop.GUARD_FALSE:
assert result == expected
else:
assert result != expected
def test_compile_bridge_check_profile_info(self):
py.test.skip("does not work, reinvestigate")
class FakeProfileAgent(object):
def __init__(self):
self.functions = []
def native_code_written(self, name, address, size):
self.functions.append((name, address, size))
self.cpu.profile_agent = agent = FakeProfileAgent()
i0 = InputArgInt()
i1 = InputArgInt()
i2 = InputArgInt()
targettoken = TargetToken()
faildescr1 = BasicFailDescr(1)
faildescr2 = BasicFailDescr(2)
looptoken = JitCellToken()
looptoken.number = 17
class FakeString(object):
def __init__(self, val):
self.val = val
def _get_str(self):
return self.val
operations = [
ResOperation(rop.LABEL, [i0], None, descr=targettoken),
ResOperation(rop.DEBUG_MERGE_POINT, [FakeString("hello"), 0, 0], None),
ResOperation(rop.INT_ADD, [i0, ConstInt(1)], i1),
ResOperation(rop.INT_LE, [i1, ConstInt(9)], i2),
ResOperation(rop.GUARD_TRUE, [i2], None, descr=faildescr1),
ResOperation(rop.JUMP, [i1], None, descr=targettoken),
]
inputargs = [i0]
operations[-2].setfailargs([i1])
self.cpu.compile_loop(inputargs, operations, looptoken)
name, loopaddress, loopsize = agent.functions[0]
assert name == "Loop # 17: hello (loop counter 0)"
assert loopaddress <= looptoken._ll_loop_code
assert loopsize >= 40 # randomish number
i1b = InputArgInt()
i3 = InputArgInt()
bridge = [
ResOperation(rop.INT_LE, [i1b, ConstInt(19)], i3),
ResOperation(rop.GUARD_TRUE, [i3], None, descr=faildescr2),
ResOperation(rop.DEBUG_MERGE_POINT, [FakeString("bye"), 0, 0], None),
ResOperation(rop.JUMP, [i1b], None, descr=targettoken),
]
bridge[1].setfailargs([i1b])
self.cpu.compile_bridge(faildescr1, [i1b], bridge, looptoken)
name, address, size = agent.functions[1]
assert name == "Bridge # 0: bye (loop counter 1)"
# Would be exactly ==, but there are some guard failure recovery
# stubs in-between
assert address >= loopaddress + loopsize
assert size >= 10 # randomish number
deadframe = self.cpu.execute_token(looptoken, 2)
fail = self.cpu.get_latest_descr(deadframe)
assert fail.identifier == 2
res = self.cpu.get_int_value(deadframe, 0)
assert res == 20
def test_ops_offset(self):
from rpython.rlib import debug
looptoken = JitCellToken()
targettoken = TargetToken()
loop = parse("""
[i0]
label(i0, descr=targettoken)
i1 = int_add(i0, 1)
i2 = int_le(i1, 9)
jump(i1, descr=targettoken)
""", namespace=locals())
debug._log = dlog = debug.DebugLog()
info = self.cpu.compile_loop(loop.inputargs, loop.operations, looptoken)
ops_offset = info.ops_offset
debug._log = None
#
assert ops_offset is looptoken._x86_ops_offset
# 2*increment_debug_counter + ops + None
assert len(ops_offset) == 2 + len(loop.operations) + 1
assert (ops_offset[loop.operations[0]] <=
ops_offset[loop.operations[1]] <=
ops_offset[loop.operations[2]] <=
ops_offset[None])
def test_calling_convention(self, monkeypatch):
if WORD != 4:
py.test.skip("32-bit only test")
from rpython.jit.backend.x86.regloc import eax, edx
from rpython.jit.backend.x86 import codebuf, callbuilder
from rpython.jit.codewriter.effectinfo import EffectInfo
from rpython.rlib.libffi import types, clibffi
had_stdcall = hasattr(clibffi, 'FFI_STDCALL')
if not had_stdcall: # not running on Windows, but we can still test
monkeypatch.setattr(clibffi, 'FFI_STDCALL', 12345, raising=False)
monkeypatch.setattr(callbuilder, 'stdcall_or_cdecl', True)
else:
assert callbuilder.stdcall_or_cdecl
#
for real_ffi, reported_ffi in [
(clibffi.FFI_DEFAULT_ABI, clibffi.FFI_DEFAULT_ABI),
(clibffi.FFI_STDCALL, clibffi.FFI_DEFAULT_ABI),
(clibffi.FFI_STDCALL, clibffi.FFI_STDCALL)]:
cpu = self.cpu
mc = codebuf.MachineCodeBlockWrapper()
mc.MOV_rs(eax.value, 4) # argument 1
mc.MOV_rs(edx.value, 40) # argument 10
mc.SUB_rr(eax.value, edx.value) # return arg1 - arg10
if real_ffi == clibffi.FFI_DEFAULT_ABI:
mc.RET()
else:
mc.RET16_i(40)
rawstart = mc.materialize(cpu, [])
#
calldescr = cpu._calldescr_dynamic_for_tests([types.slong] * 10,
types.slong)
calldescr.get_call_conv = lambda: reported_ffi # <==== hack
# ^^^ we patch get_call_conv() so that the test also makes sense
# on Linux, because clibffi.get_call_conv() would always
# return FFI_DEFAULT_ABI on non-Windows platforms.
funcbox = ConstInt(rawstart)
i1 = InputArgInt()
i2 = InputArgInt()
c = ConstInt(-1)
faildescr = BasicFailDescr(1)
cz = ConstInt(0)
# we must call it repeatedly: if the stack pointer gets increased
# by 40 bytes by the STDCALL call, and if we don't expect it,
# then we are going to get our stack emptied unexpectedly by
# several repeated calls
ops = [
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
]
i3 = ops[0]
i4 = ops[2]
i5 = ops[4]
i6 = ops[6]
ops += [
ResOperation(rop.GUARD_FALSE, [i3],
descr=BasicFailDescr(0)),
ResOperation(rop.FINISH, [],
descr=BasicFinalDescr(1))
]
ops[-2].setfailargs([i3, i4, i5, i6])
ops[1].setfailargs([])
ops[3].setfailargs([])
ops[5].setfailargs([])
ops[7].setfailargs([])
looptoken = JitCellToken()
self.cpu.compile_loop([i1, i2], ops, looptoken)
deadframe = self.cpu.execute_token(looptoken, 123450, 123408)
fail = self.cpu.get_latest_descr(deadframe)
assert fail.identifier == 0
assert self.cpu.get_int_value(deadframe, 0) == 42
assert self.cpu.get_int_value(deadframe, 1) == 42
assert self.cpu.get_int_value(deadframe, 2) == 42
assert self.cpu.get_int_value(deadframe, 3) == 42
class TestDebuggingAssembler(object):
def setup_method(self, meth):
self.cpu = CPU(rtyper=None, stats=FakeStats())
self.cpu.setup_once()
def test_debugger_on(self):
from rpython.tool.logparser import parse_log_file, extract_category
from rpython.rlib import debug
targettoken, preambletoken = TargetToken(), TargetToken()
loop = """
[i0]
label(i0, descr=preambletoken)
debug_merge_point('xyz', 0, 0)
i1 = int_add(i0, 1)
i2 = int_ge(i1, 10)
guard_false(i2) []
label(i1, descr=targettoken)
debug_merge_point('xyz', 0, 0)
i11 = int_add(i1, 1)
i12 = int_ge(i11, 10)
guard_false(i12) []
jump(i11, descr=targettoken)
"""
ops = parse(loop, namespace={'targettoken': targettoken,
'preambletoken': preambletoken})
debug._log = dlog = debug.DebugLog()
try:
self.cpu.assembler.set_debug(True)
looptoken = JitCellToken()
self.cpu.compile_loop(ops.inputargs, ops.operations, looptoken)
self.cpu.execute_token(looptoken, 0)
# check debugging info
struct = self.cpu.assembler.loop_run_counters[0]
assert struct.i == 1
struct = self.cpu.assembler.loop_run_counters[1]
assert struct.i == 1
struct = self.cpu.assembler.loop_run_counters[2]
assert struct.i == 9
self.cpu.finish_once()
finally:
debug._log = None
l0 = ('debug_print', 'entry -1:1')
l1 = ('debug_print', preambletoken.repr_of_descr() + ':1')
l2 = ('debug_print', targettoken.repr_of_descr() + ':9')
assert ('jit-backend-counts', [l0, l1, l2]) in dlog
| mit | -5,869,284,651,135,382,000 | 42.636672 | 110 | 0.509221 | false |
dequis/qtile | docs/conf.py | 1 | 9459 | # -*- coding: utf-8 -*-
#
# Qtile documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 11 15:20:21 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
try:
# Python >=3.3
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
class Mock(MagicMock):
    # xcbq does a dir() on objects, pulls stuff out of them and tries to sort
    # the result. MagicMock has a bunch of stuff that can't be sorted, so let's
    # lie about dir().
def __dir__(self):
return []
MOCK_MODULES = [
'libqtile._ffi_pango',
'libqtile._ffi_xcursors',
'cairocffi',
'cffi',
'dateutil',
'dateutil.parser',
'dbus',
'dbus.mainloop.glib',
'iwlib',
'keyring',
'mpd',
'trollius',
'xcffib',
'xcffib.randr',
'xcffib.xfixes',
'xcffib.xinerama',
'xcffib.xproto',
'xdg.IconTheme',
]
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
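# The mocks above let Sphinx autodoc import qtile's modules on build hosts
# (e.g. Read the Docs) where the compiled X11/cffi dependencies are not
# installed: any attribute access on a mocked module just returns a MagicMock.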
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.coverage',
'sphinx.ext.graphviz',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinxcontrib.seqdiag',
'sphinx_qtile',
'numpydoc',
]
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Qtile'
copyright = u'2008-2016, Aldo Cortesi and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.10.5'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'man']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {'index': 'index.html'}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Qtiledoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Qtile.tex', u'Qtile Documentation',
u'Aldo Cortesi', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('man/qtile', 'qtile', u'Qtile Documentation',
[u'Tycho Andersen'], 1),
('man/qsh', 'qsh', u'Qtile Documentation',
[u'Tycho Andersen'], 1),
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Qtile', u'Qtile Documentation',
u'Aldo Cortesi', 'Qtile', 'A hackable tiling window manager.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# only import and set the theme if we're building docs locally
if not os.environ.get('READTHEDOCS', None):
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
graphviz_dot_args = ['-Lg']
# A workaround for the responsive tables always having annoying scrollbars.
def setup(app):
app.add_stylesheet("no_scrollbars.css")
| mit | 5,405,494,409,025,351,000 | 29.61165 | 86 | 0.693625 | false |
bolkedebruin/airflow | airflow/providers/amazon/aws/operators/datasync.py | 1 | 16833 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Create, get, update, execute and delete an AWS DataSync Task.
"""
import random
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.datasync import AWSDataSyncHook
from airflow.utils.decorators import apply_defaults
# pylint: disable=too-many-instance-attributes, too-many-arguments
class AWSDataSyncOperator(BaseOperator):
r"""Find, Create, Update, Execute and Delete AWS DataSync Tasks.
If ``do_xcom_push`` is True, then the DataSync TaskArn and TaskExecutionArn
which were executed will be pushed to an XCom.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:AWSDataSyncOperator`
.. note:: There may be 0, 1, or many existing DataSync Tasks defined in your AWS
environment. The default behavior is to create a new Task if there are 0, or
execute the Task if there was 1 Task, or fail if there were many Tasks.
:param str aws_conn_id: AWS connection to use.
:param int wait_interval_seconds: Time to wait between two
consecutive calls to check TaskExecution status.
:param str task_arn: AWS DataSync TaskArn to use. If None, then this operator will
attempt to either search for an existing Task or attempt to create a new Task.
:param str source_location_uri: Source location URI to search for. All DataSync
Tasks with a LocationArn with this URI will be considered.
Example: ``smb://server/subdir``
:param str destination_location_uri: Destination location URI to search for.
All DataSync Tasks with a LocationArn with this URI will be considered.
Example: ``s3://airflow_bucket/stuff``
:param bool allow_random_task_choice: If multiple Tasks match, one must be chosen to
execute. If allow_random_task_choice is True then a random one is chosen.
:param bool allow_random_location_choice: If multiple Locations match, one must be chosen
when creating a task. If allow_random_location_choice is True then a random one is chosen.
:param dict create_task_kwargs: If no suitable TaskArn is identified,
it will be created if ``create_task_kwargs`` is defined.
``create_task_kwargs`` is then used internally like this:
``boto3.create_task(**create_task_kwargs)``
Example: ``{'Name': 'xyz', 'Options': ..., 'Excludes': ..., 'Tags': ...}``
:param dict create_source_location_kwargs: If no suitable LocationArn is found,
a Location will be created if ``create_source_location_kwargs`` is defined.
``create_source_location_kwargs`` is then used internally like this:
``boto3.create_location_xyz(**create_source_location_kwargs)``
The xyz is determined from the prefix of source_location_uri, eg ``smb:/...`` or ``s3:/...``
Example: ``{'Subdirectory': ..., 'ServerHostname': ..., ...}``
:param dict create_destination_location_kwargs: If no suitable LocationArn is found,
a Location will be created if ``create_destination_location_kwargs`` is defined.
``create_destination_location_kwargs`` is used internally like this:
``boto3.create_location_xyz(**create_destination_location_kwargs)``
        The xyz is determined from the prefix of destination_location_uri, eg ``smb:/...`` or ``s3:/...``
Example: ``{'S3BucketArn': ..., 'S3Config': {'BucketAccessRoleArn': ...}, ...}``
:param dict update_task_kwargs: If a suitable TaskArn is found or created,
it will be updated if ``update_task_kwargs`` is defined.
``update_task_kwargs`` is used internally like this:
``boto3.update_task(TaskArn=task_arn, **update_task_kwargs)``
Example: ``{'Name': 'xyz', 'Options': ..., 'Excludes': ...}``
:param dict task_execution_kwargs: Additional kwargs passed directly when starting the
Task execution, used internally like this:
``boto3.start_task_execution(TaskArn=task_arn, **task_execution_kwargs)``
:param bool delete_task_after_execution: If True then the TaskArn which was executed
will be deleted from AWS DataSync on successful completion.
:raises AirflowException: If ``task_arn`` was not specified, or if
either ``source_location_uri`` or ``destination_location_uri`` were
not specified.
:raises AirflowException: If source or destination Location weren't found
and could not be created.
:raises AirflowException: If ``choose_task`` or ``choose_location`` fails.
:raises AirflowException: If Task creation, update, execution or delete fails.
"""
template_fields = (
"task_arn",
"source_location_uri",
"destination_location_uri",
"create_task_kwargs",
"create_source_location_kwargs",
"create_destination_location_kwargs",
"update_task_kwargs",
"task_execution_kwargs"
)
ui_color = "#44b5e2"
@apply_defaults
def __init__(
self,
aws_conn_id="aws_default",
wait_interval_seconds=5,
task_arn=None,
source_location_uri=None,
destination_location_uri=None,
allow_random_task_choice=False,
allow_random_location_choice=False,
create_task_kwargs=None,
create_source_location_kwargs=None,
create_destination_location_kwargs=None,
update_task_kwargs=None,
task_execution_kwargs=None,
delete_task_after_execution=False,
*args,
**kwargs
):
super().__init__(*args, **kwargs)
# Assignments
self.aws_conn_id = aws_conn_id
self.wait_interval_seconds = wait_interval_seconds
self.task_arn = task_arn
self.source_location_uri = source_location_uri
self.destination_location_uri = destination_location_uri
self.allow_random_task_choice = allow_random_task_choice
self.allow_random_location_choice = allow_random_location_choice
self.create_task_kwargs = create_task_kwargs if create_task_kwargs else dict()
self.create_source_location_kwargs = dict()
if create_source_location_kwargs:
self.create_source_location_kwargs = create_source_location_kwargs
self.create_destination_location_kwargs = dict()
if create_destination_location_kwargs:
self.create_destination_location_kwargs = create_destination_location_kwargs
self.update_task_kwargs = update_task_kwargs if update_task_kwargs else dict()
self.task_execution_kwargs = task_execution_kwargs if task_execution_kwargs else dict()
self.delete_task_after_execution = delete_task_after_execution
# Validations
valid = False
if self.task_arn:
valid = True
if self.source_location_uri and self.destination_location_uri:
valid = True
if not valid:
raise AirflowException(
"Either specify task_arn or both source_location_uri and destination_location_uri. "
"task_arn={0} source_location_uri={1} destination_location_uri={2}".format(
task_arn, source_location_uri, destination_location_uri
)
)
# Others
self.hook = None
# Candidates - these are found in AWS as possible things
# for us to use
self.candidate_source_location_arns = None
self.candidate_destination_location_arns = None
self.candidate_task_arns = None
# Actuals
self.source_location_arn = None
self.destination_location_arn = None
self.task_execution_arn = None
def get_hook(self):
"""Create and return AWSDataSyncHook.
:return AWSDataSyncHook: An AWSDataSyncHook instance.
"""
if not self.hook:
self.hook = AWSDataSyncHook(
aws_conn_id=self.aws_conn_id,
wait_interval_seconds=self.wait_interval_seconds,
)
return self.hook
def execute(self, context):
# If task_arn was not specified then try to
# find 0, 1 or many candidate DataSync Tasks to run
if not self.task_arn:
self._get_tasks_and_locations()
# If some were found, identify which one to run
if self.candidate_task_arns:
self.task_arn = self.choose_task(
self.candidate_task_arns)
            # If we couldn't find one then try to create one
if not self.task_arn and self.create_task_kwargs:
self._create_datasync_task()
if not self.task_arn:
raise AirflowException(
"DataSync TaskArn could not be identified or created.")
self.log.info("Using DataSync TaskArn %s", self.task_arn)
# Update the DataSync Task
if self.update_task_kwargs:
self._update_datasync_task()
# Execute the DataSync Task
self._execute_datasync_task()
if not self.task_execution_arn:
raise AirflowException("Nothing was executed")
# Delete the DataSyncTask
if self.delete_task_after_execution:
self._delete_datasync_task()
return {"TaskArn": self.task_arn, "TaskExecutionArn": self.task_execution_arn}
def _get_tasks_and_locations(self):
"""Find existing DataSync Task based on source and dest Locations."""
hook = self.get_hook()
self.candidate_source_location_arns = self._get_location_arns(
self.source_location_uri
)
self.candidate_destination_location_arns = self._get_location_arns(
self.destination_location_uri
)
if not self.candidate_source_location_arns:
self.log.info("No matching source Locations")
return
if not self.candidate_destination_location_arns:
self.log.info("No matching destination Locations")
return
self.log.info("Finding DataSync TaskArns that have these LocationArns")
self.candidate_task_arns = hook.get_task_arns_for_location_arns(
self.candidate_source_location_arns,
self.candidate_destination_location_arns,
)
self.log.info("Found candidate DataSync TaskArns %s",
self.candidate_task_arns)
def choose_task(self, task_arn_list):
"""Select 1 DataSync TaskArn from a list"""
if not task_arn_list:
return None
if len(task_arn_list) == 1:
return task_arn_list[0]
if self.allow_random_task_choice:
            # Items are unordered so we don't want to just take
            # the [0] one as it implies ordered items were received
            # from AWS and might lead to confusion. Rather explicitly
            # choose a random one
return random.choice(task_arn_list)
raise AirflowException(
"Unable to choose a Task from {}".format(task_arn_list))
def choose_location(self, location_arn_list):
"""Select 1 DataSync LocationArn from a list"""
if not location_arn_list:
return None
if len(location_arn_list) == 1:
return location_arn_list[0]
if self.allow_random_location_choice:
            # Items are unordered so we don't want to just take
            # the [0] one as it implies ordered items were received
            # from AWS and might lead to confusion. Rather explicitly
            # choose a random one
return random.choice(location_arn_list)
raise AirflowException(
"Unable to choose a Location from {}".format(location_arn_list))
def _create_datasync_task(self):
"""Create a AWS DataSyncTask."""
hook = self.get_hook()
self.source_location_arn = self.choose_location(
self.candidate_source_location_arns
)
if not self.source_location_arn and self.create_source_location_kwargs:
self.log.info('Attempting to create source Location')
self.source_location_arn = hook.create_location(
self.source_location_uri, **self.create_source_location_kwargs
)
if not self.source_location_arn:
raise AirflowException(
"Unable to determine source LocationArn."
" Does a suitable DataSync Location exist?")
self.destination_location_arn = self.choose_location(
self.candidate_destination_location_arns
)
if not self.destination_location_arn and self.create_destination_location_kwargs:
self.log.info('Attempting to create destination Location')
self.destination_location_arn = hook.create_location(
self.destination_location_uri, **self.create_destination_location_kwargs
)
if not self.destination_location_arn:
raise AirflowException(
"Unable to determine destination LocationArn."
" Does a suitable DataSync Location exist?")
self.log.info("Creating a Task.")
self.task_arn = hook.create_task(
self.source_location_arn,
self.destination_location_arn,
**self.create_task_kwargs
)
if not self.task_arn:
raise AirflowException("Task could not be created")
self.log.info("Created a Task with TaskArn %s", self.task_arn)
return self.task_arn
def _update_datasync_task(self):
"""Update a AWS DataSyncTask."""
hook = self.get_hook()
self.log.info("Updating TaskArn %s", self.task_arn)
hook.update_task(self.task_arn, **self.update_task_kwargs)
self.log.info("Updated TaskArn %s", self.task_arn)
return self.task_arn
def _execute_datasync_task(self):
"""Create and monitor an AWSDataSync TaskExecution for a Task."""
hook = self.get_hook()
# Create a task execution:
self.log.info("Starting execution for TaskArn %s", self.task_arn)
self.task_execution_arn = hook.start_task_execution(
self.task_arn, **self.task_execution_kwargs)
self.log.info("Started TaskExecutionArn %s", self.task_execution_arn)
# Wait for task execution to complete
self.log.info("Waiting for TaskExecutionArn %s",
self.task_execution_arn)
result = hook.wait_for_task_execution(self.task_execution_arn)
self.log.info("Completed TaskExecutionArn %s", self.task_execution_arn)
task_execution_description = hook.describe_task_execution(
task_execution_arn=self.task_execution_arn
)
self.log.info("task_execution_description=%s",
task_execution_description)
if not result:
raise AirflowException(
"Failed TaskExecutionArn %s" % self.task_execution_arn
)
return self.task_execution_arn
def on_kill(self):
"""Cancel the submitted DataSync task."""
hook = self.get_hook()
if self.task_execution_arn:
self.log.info("Cancelling TaskExecutionArn %s",
self.task_execution_arn)
hook.cancel_task_execution(
task_execution_arn=self.task_execution_arn)
self.log.info("Cancelled TaskExecutionArn %s",
self.task_execution_arn)
def _delete_datasync_task(self):
"""Deletes an AWS DataSync Task."""
hook = self.get_hook()
# Delete task:
self.log.info("Deleting Task with TaskArn %s", self.task_arn)
hook.delete_task(self.task_arn)
self.log.info("Task Deleted")
return self.task_arn
def _get_location_arns(self, location_uri):
location_arns = self.get_hook().get_location_arns(
location_uri
)
self.log.info(
"Found LocationArns %s for LocationUri %s", location_arns, location_uri
)
return location_arns
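# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original operator): the DAG id,
# connection id and location URIs below are hypothetical placeholders. The
# operator searches for an existing Task between the two locations, executes
# it, and (with do_xcom_push) pushes TaskArn/TaskExecutionArn to XCom.
# Guarded under __main__ so importing this module stays side-effect free.
if __name__ == "__main__":
    from airflow import DAG
    from airflow.utils.dates import days_ago
    with DAG(dag_id="example_datasync_sketch", schedule_interval=None,
             start_date=days_ago(1)) as dag:
        sync_task = AWSDataSyncOperator(
            task_id="smb_to_s3",
            aws_conn_id="aws_default",
            source_location_uri="smb://server/subdir",
            destination_location_uri="s3://airflow_bucket/stuff",
        )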
| apache-2.0 | 2,900,169,435,753,842,000 | 42.608808 | 104 | 0.641775 | false |
mivanov/editkit | editkit/ckeditor/tests/tests.py | 1 | 6356 | # coding=utf-8
import os
from django.test import TestCase
from django.db import models
from django.core import exceptions
from ckeditor.models import XHTMLField
from ckeditor.models import XMLField
from ckeditor.models import HTML5Field
from ckeditor.models import HTML5FragmentField
from ckeditor.widgets import CKEditor
class XHTMLModel(models.Model):
html = XHTMLField()
class HTML5Model(models.Model):
html = HTML5Field()
class HTML5FragmentModel(models.Model):
html = HTML5FragmentField()
class RestrictedHTML5FragmentModel(models.Model):
html = HTML5FragmentField(allowed_elements=['a', 'span'],
allowed_attributes_map={'a': ['href'],
'span': ['style']},
allowed_styles_map={'span': ['width']},
rename_elements={'div': 'span'})
class XHTMLFieldTest(TestCase):
def test_html_schema_set(self):
html = XHTMLField()
self.assertTrue(isinstance(html, XMLField))
self.assertEquals(html.schema_path, XHTMLField.schema_path)
def test_html_schema_exists(self):
self.assertTrue(os.path.exists(XHTMLField.schema_path))
def test_valid_html(self):
m = XHTMLModel()
m.html = ('<html><head><title>Lorem</title></head>'
'<body>Ipsum</body></html>')
m.clean_fields()
def test_invalid_html(self):
m = XHTMLModel()
m.html = 'invalid html'
self.assertRaises(exceptions.ValidationError, m.clean_fields)
class HTML5FieldTest(TestCase):
def test_sanitize(self):
m = HTML5Model()
m.html = '<html><head/><body><script/></body></html>'
m.clean_fields()
self.assertEquals(m.html,
('<html><head/><body><html><head/><body>'
'<script/></body></html></body></html>')
)
class HTML5FragmentField(TestCase):
def test_sanitize(self):
m = HTML5FragmentModel()
m.html = '<script/>'
m.clean_fields()
self.assertEquals(m.html, '<script/>')
def test_allowed_elements(self):
m = RestrictedHTML5FragmentModel()
m.html = '<p><a href="#top">This link</a> takes you to the top</p>'
m.clean_fields()
self.assertEquals(m.html, ('<p><a href="#top">This link</a>'
' takes you to the top</p>'))
def test_allowed_attributes(self):
m = RestrictedHTML5FragmentModel()
m.html = ('<span style="width: 300px;" class="myclass">'
'Click <a href="www.example.com" target="_top">here</a>'
'</span>')
m.clean_fields()
self.assertEquals(m.html, ('<span style="width: 300px;">'
'Click <a href="www.example.com">here</a></span>'))
def test_allowed_styles(self):
m = RestrictedHTML5FragmentModel()
m.html = ('<span style="width: 300px; height:100px">Blah</span>')
m.clean_fields()
self.assertEquals(m.html, '<span style="width: 300px;">Blah</span>')
def test_rename_elements(self):
m = RestrictedHTML5FragmentModel()
m.html = '<div>This should be a span</div>'
m.clean_fields()
self.assertEquals(m.html, '<span>This should be a span</span>')
def test_empty_a_element(self):
m = HTML5FragmentModel()
m.html = '<p><a name="test"></a></p>'
m.clean_fields()
self.assertEquals(m.html, '<p><a name="test"></a></p>')
def test_nbsp(self):
''' We store UTF-8, so should be stored as \xc2\xa0 (2 chars)
'''
m = HTML5FragmentModel()
m.html = '<p> </p> '
m.clean_fields()
self.assertEquals(m.html, '<p>\xc2\xa0</p>\xc2\xa0')
def test_charset(self):
m = HTML5FragmentModel()
m.html = '<p>Привет</p>'
m.clean_fields()
self.assertEquals(m.html, '<p>Привет</p>')
class CKEditorWidgetTest(TestCase):
def test_default_config(self):
ck = CKEditor()
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\n'
'CKEDITOR.basePath = \'/static/js/ckeditor/\';\n'
"CKEDITOR.replace('id_ck');\n"
'-->\n'
'</script>\n')
self.assertEqual(rendered, expected)
def test_config_based_on_allowed_tags(self):
ck = CKEditor(allowed_tags=['a'])
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\nCKEDITOR.basePath = \'/static/js/ckeditor/\';'
'\nCKEDITOR.replace(\'id_ck\', {"toolbar": [["Link",'
' "Unlink", "Anchor"]]});\n-->\n</script>\n'
)
self.assertEqual(rendered, expected)
def test_custom_config(self):
ck = CKEditor(ck_config={'extraPlugins': 'myThing'})
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\nCKEDITOR.basePath = \'/static/js/ckeditor/\';\n'
'CKEDITOR.replace(\'id_ck\', {"extraPlugins": "myThing"});'
'\n-->\n</script>\n')
self.assertEqual(rendered, expected)
class CustomCKEditor(CKEditor):
def get_extra_plugins(self):
plugins = ["myPlugin1", "myPlugin2"]
return ','.join(plugins)
class CustomCKEditorTest(TestCase):
def test_config(self):
ck = CustomCKEditor()
rendered = ck.render("ck", "Test")
expected = ('<textarea rows="10" cols="40" name="ck">Test</textarea>'
'<script type="text/javascript">\n'
'<!--\nCKEDITOR.basePath = \'/static/js/ckeditor/\';\n'
"CKEDITOR.replace('id_ck', "
'{"extraPlugins": "myPlugin1,myPlugin2"});\n'
'-->\n'
'</script>\n')
self.assertEqual(rendered, expected)
| gpl-2.0 | 8,648,883,103,976,645,000 | 35.045455 | 79 | 0.54918 | false |
uannight/reposan | plugin.video.tvalacarta/channels/vuittv.py | 1 | 7934 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# tvalacarta - XBMC Plugin
# Channel for 8TV
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#------------------------------------------------------------
import re
import sys
import os
import traceback
import urllib2
from core import logger
from core import config
from core import scrapertools
from core.item import Item
from servers import servertools
__channel__ = "vuittv"
__category__ = "R"
__type__ = "generic"
__title__ = "8TV"
__language__ = "ES"
__creationdate__ = "20160928"
DEBUG = config.get_setting("debug")
URL_LIVE = "rtmp://streaming.8tv.cat:1935/8TV?videoId=3998198240001&lineUpId=&pubId=1589608506001&playerId=1982328835001&affiliateId=/8aldia-directe?videoId=3998198240001&lineUpId=&pubId=1589608506001&playerId=1982328835001&affiliateId="
def isGeneric():
return True
def mainlist(item):
logger.info("tvalacarta.channels.8tv mainlist")
itemlist = []
itemlist.append( Item(channel=__channel__, title="8tv directe", action="play", url = URL_LIVE, folder=False) )
itemlist.append( Item(channel=__channel__, title="8aldia Inici (destacat)", action="loadprogram", url = "http://www.8tv.cat/8aldia/", folder=True) )
itemlist.append( Item(channel=__channel__, title="8aldia Reflexió Cuní", action="loadprogram", url = "http://www.8tv.cat/8aldia/reflexio-de-josep-cuni/", folder=True) )
itemlist.append( Item(channel=__channel__, title="8aldia Seccions", action="loadsections", folder=True) )
itemlist.append( Item(channel=__channel__, title="8aldia Programes sencers", action="loadprogram", url = "http://www.8tv.cat/8aldia/programes-sencers/", folder=True) )
return itemlist
# Load sections
def loadsections(item):
logger.info("tvalacarta.channels.8tv loadsection")
itemlist = []
itemlist.append( Item(channel=__channel__, title="Entrevistes", action="loadprogram", url="http://www.8tv.cat/8aldia/category/entrevistes/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Pilar Rahola", action="loadprogram", url="http://www.8tv.cat/8aldia/category/pilar-rahola/", folder=True) )
itemlist.append( Item(channel=__channel__, title="La Tertúlia", action="loadprogram", url="http://www.8tv.cat/8aldia/category/tertulia/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Opinió", action="loadprogram", url="http://www.8tv.cat/8aldia/category/opinio/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Política", action="loadprogram", url="http://www.8tv.cat/8aldia/category/politica/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Internacional", action="loadprogram", url="http://www.8tv.cat/8aldia/category/internacional/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Economia", action="loadprogram", url="http://www.8tv.cat/8aldia/category/economia-videos/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Societat", action="loadprogram", url="http://www.8tv.cat/8aldia/category/societat/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Successos", action="loadprogram", url="http://www.8tv.cat/8aldia/category/successos/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Tribunals", action="loadprogram", url="http://www.8tv.cat/8aldia/category/tribunals/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Cultura", action="loadprogram", url="http://www.8tv.cat/8aldia/category/cultura/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Tecnologia", action="loadprogram", url="http://www.8tv.cat/8aldia/category/tecnologia/", folder=True) )
itemlist.append( Item(channel=__channel__, title="Esports", action="loadprogram", url="http://www.8tv.cat/8aldia/category/esports/", folder=True) )
return itemlist
# Load the programs of a section
def loadprogram(item):
logger.info("tvalacarta.channels.8tv loadprogram")
return pager(item.url, item.channel, item)
# Build the video list with a pager
def pager(url, channel=__channel__, item=None):
logger.info("tvalacarta.channels.8tv pager")
try:
itemlist = []
data = scrapertools.downloadpage(url)
data = data.replace("\\\"","")
#logger.error("DATA: " + str(data))
# --------------------------------------------------------
        # Extract the videos (article tag)
# --------------------------------------------------------
patron = '<article class="entry-box entry-video (.*?)</article>'
matches = re.compile(patron,re.DOTALL).findall(data)
if len(matches) > 0:
for chapter in matches:
try:
#
# Ex: <h2 class="entry-title"><a href="http://www.8tv.cat/8aldia/videos/el-proxim-11-de-setembre-marcat-pel-referendum/" title="El pròxim 11 de Setembre, marcat pel referèndum">
#
patron = ' src="([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(chapter)
scrapedthumbnail = matches[0]
patron = '<h2 class="entry-title"><a href="([^"]+)" title="([^"]+)">'
matches = re.compile(patron,re.DOTALL).findall(chapter)
urlprog = matches[0][0]
scrapedtitle = matches[0][1]
date = scrapertools.find_single_match(chapter, '<time datetime="[^"]+" pubdate class="updated">(.*?) - [^<]+</time>')
                # Add it to the list
itemlist.append(
Item(channel=channel,
action = 'play',
title = date.strip() + " - " + str(scrapedtitle).replace(""", "'").replace("“", "").replace("”", "").replace('“', "").replace('”', "").strip(),
url = urlprog,
thumbnail = scrapedthumbnail,
server = channel,
folder = False
)
)
except:
for line in sys.exc_info():
logger.error("tvalacarta.channels.8tv pager ERROR1: %s" % line)
        # Extract the pager link for the next page
patron = "<a class="+"'"+"no_bg"+"'"+' href="([^"]+)">Següent</a>'
urlpager = re.compile(patron,re.DOTALL).findall(data)
#logger.info("URLPAGER: %s" % urlpager[0])
if len(urlpager)>0 :
next_page_item = Item(channel=channel,
action = 'loadprogram',
title = '>> Següent',
url = urlpager[0],
thumbnail = ''
)
itemlist.append(next_page_item)
except:
for line in sys.exc_info():
logger.error("tvalacarta.channels.8tv pager ERROR2: %s" % line)
return itemlist
# Play the item with the channel's own server
def play(item):
item.server = __channel__;
itemlist = [item]
return itemlist
# Automatic channel check: this function must return "True" if everything in the channel is OK.
def test():
    # Check that the first option returns something
items = mainlist(Item())
section = loadsections(items[1])
if len(section)==0:
return False,"No hay videos en portada"
section = loadprogram(items[4])
if len(section)==0:
return False,"No hay videos en 8aldia"
return True,""
| gpl-2.0 | 5,244,642,386,767,732,000 | 45.274854 | 238 | 0.573739 | false |
almc/nao_basic | scripts/greedy_rrt.py | 1 | 8935 | #!/usr/bin/env python
import numpy as np
import sys, random, math, pygame
from pygame.locals import *
from math import sqrt,cos,sin,atan2
# constants
DIM, DIM_X, DIM_Y = 2, 640, 480 # range 0 to 640, and 0 to 480
WINSIZE = [DIM_X, DIM_Y] # could be any range for each var
# parameters
NUMNODES = 2000
DELTA_IN = np.array([0, 0]) # 15
DELTA_UT = np.array([20, 20]) # 20
PEN_DIST_EXP = 5
PEN_DIST_OBS = 8
EXP_DIST = 5
GOAL_DIST = 10
# EPSILON_X = 20.0
# EPSILON_Y = 15.0
# DELTA_X = 10.0
# DELTA_Y = 7.0
# PEN_EXPANSION = 0.9
# PEN_INVALID = 0.1
# PATCH_VALID = np.array([])
# PATCH_INVALID = np.array([])
def dist(p1, p2):
return np.linalg.norm(p1[0:2]-p2[0:2])
def raw_score(node1, node2):
return 1.0/dist(node1, node2)
def generate_random():
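    # Sample a random displacement whose |x| is uniform in
    # [DELTA_IN[0], DELTA_UT[0]] and |y| uniform in [DELTA_IN[1], DELTA_UT[1]];
    # each axis keeps or flips its sign with probability 0.5. The third slot
    # (later used for the node score) is left at 0.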
rand = np.array([np.random.uniform(DELTA_IN[0], DELTA_UT[0], 1)[0],
np.random.uniform(DELTA_IN[1], DELTA_UT[1], 1)[0], 0])
sign = np.random.uniform(0, 1, 2)
for r_i, r in enumerate(rand[:-1]):
if sign[r_i] >= 0.5:
rand[r_i] *= -1
return rand
# def avg_score(nn, nodes):
# box = pygame.Rect(nn[0], nn[1], int(DELTA_X), int(DELTA_Y))
# delta_n_counter = 0
# delta_s_counter = 0
# for n_i, n in enumerate(nodes):
# if box.collidepoint(n[0], n[1]): # check delta neighbourhood
# delta_n_counter += 1
# delta_s_counter += n[2]
# print ">>>>>>>>>>>>>>FOUND THIS MANY NODES IN THE NEIGHBOURHOOD", delta_n_counter
# if delta_n_counter >= 1: # could be a parameter
# avg_score = delta_s_counter / delta_n_counter
# return 0.5*nn[2] + 0.5*avg_score # could be two parameters
# else:
# return nn[2]
def check_collision(node, obsta):
for o in obsta: # check node collision with every obstacle
if (o.collidepoint(node[0], node[1])):
return True # return as soon as one of them is true
return False # if no obstacle collides return false
def check_unexplored(nn, nodes):
for n_i, n in enumerate(nodes):
d = dist(nn, n)
if d < EXP_DIST:
# print "explored node, rejected node, distance", n, nn, d
return False
return True
def check_goal(nn, goals):
if dist(nn, goals) < GOAL_DIST: return True
else: return False
def draw_obsta(screen, obsta, color):
for o in obsta:
pygame.draw.rect(screen, color, o, 0)
def draw_nodes(screen, nodes, color, node_radius):
for n in nodes:
pygame.draw.circle(screen, color, (int(n[0]),int(n[1])), node_radius, 2)
def draw_goals(screen, goals, color, node_radius):
for g in goals:
pygame.draw.circle(screen, color, (int(g[0]),int(g[1])), node_radius, 2)
def penalize_nodes(nn, nodes, pen_dist):
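    # Scale down the stored score (third component) of every node that lies
    # within pen_dist of nn; the closer a node is, the stronger the penalty.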
for n_i, n in enumerate(nodes):
d = dist(nn, n)
if d < pen_dist:
# print "penalizing node", n_i, nodes[n_i][2]
nodes[n_i][2] *= d/pen_dist
# print "score after penalization", nodes[n_i][2]
return nodes
def organize_nodes(nodes):
# nodes = np.sort(nodes, axis=0)[::-1]
temp = nodes[nodes[:,2].argsort()]
nodes = temp[::-1]
# print "nodes organized\n", nodes
return nodes
def insert_node(nn, nodes):
flag_inserted = False
for p_i, p in enumerate(nodes):
if nn[2] > p[2]: # if avg_score of new node is higher than p_i
# print "adding node", nn
nodes = np.insert(nodes, [p_i], nn, 0)
flag_inserted = True
break
if flag_inserted == False:
# print "score is worse than others"
# print nodes
# print nn
nodes = np.append(nodes, [nn], 0)
return nodes
def generate_newnode(nodes):
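    # Expansion strategy: offset the current best node (nodes[0]) by a random
    # step; if the candidate lands in already-explored space, penalize the
    # neighbourhood, re-rank the nodes, and retry with a growing step size
    # (the scaling factor is capped at 2).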
rand = generate_random()
    nn = nodes[0] + rand # new node, careful: third coord still carries prev raw_score
# nodes = penalize_nodes(nn, nodes, PEN_DIST_EXP) # penalize nodes closer than PEN_DIST
# nodes = organize_nodes(nodes)
scaling_factor = 1.01
while not check_unexplored(nn, nodes):
# penalize node for being close to other nodes
# print "penalizing node", nodes[0][2]
nodes = penalize_nodes(nn, nodes, PEN_DIST_EXP) # penalize nodes closer than PEN_DIST
nodes = organize_nodes(nodes)
if (scaling_factor >= 2):
scaling_factor = 2
rand = scaling_factor*generate_random()
nn = nodes[0] + rand
scaling_factor *= scaling_factor
# nodes[0][2] *= 0.5
# nodes = organize_nodes(nodes)
# print "score after penalization", nodes[0][2]
# generate another node
# rand = generate_random()
# nn = nodes[0] + rand # new node, carefull with third coord still has prev raw_score
# print "newnode", nn
return nn, nodes
## MAIN ##
def main():
np.set_printoptions(precision=10, suppress=True)
pygame.init()
pygame.display.set_caption('RRT mod - Alejandro Marzinotto - June 2014')
screen = pygame.display.set_mode(WINSIZE)
white = (255, 240, 200)
black = ( 20, 20, 40)
red = (192, 0, 0)
green = ( 0, 192, 0)
blue = ( 0, 0, 192)
yellow = (192, 192, 0)
node_radius = 3
screen.fill(black)
# variables
goals = np.array([[DIM_X/2.0,DIM_Y*1.0/12.0, 1.0]]) # goal importance
nodes = np.array([[DIM_X/2.0,DIM_Y*3.0/ 4.0, 0.0]]) # node raw_score
nodes[0][2] = raw_score(nodes[0], goals[0])
x, y = DIM_X*1.0/8.0, DIM_Y*1.0/8.0
# obsta = [pygame.Rect(x, y, 380, 100), pygame.Rect(x, y, 100, 250)]
# obsta = [pygame.Rect(x, y, 380, 100), pygame.Rect(x, y, 100, 250),
# pygame.Rect(x+400, y, 100, 250)]
# obsta = [pygame.Rect(x, y, 390, 100), pygame.Rect(x, y, 100, 250),
# pygame.Rect(x+400, y, 100, 250)]
obsta = [pygame.Rect(100, 100, 200, 40),
pygame.Rect(310, 100, 200, 40),
pygame.Rect(200, 200, 300, 40),
pygame.Rect(100, 300, 300, 40)]
invalid = check_collision(nodes[0], obsta)
assert invalid == False, "The initial pose is in a collision state"
draw_obsta(screen, obsta, red)
draw_nodes(screen, nodes, white, node_radius)
draw_goals(screen, goals, green, node_radius)
pygame.display.update()
a=raw_input()
for i in range(NUMNODES): # assumes that node[0] has the highest score
# print ">>>>>>>>>>expansion number:", i, "node:", nodes[0]
# a=raw_input()
[nn, nodes] = generate_newnode(nodes)
# raw_input()
        # check that the generated node does not fall inside a patched area.
        # if it does, penalize all the nodes involved, sort the nodes,
        # take the best one and expand again (until we get out of the problem).
cn_i, cn = 0, nodes[0] # closest node hypothesis
for p_i, p in enumerate(nodes):
if dist(p, nn) < dist(cn, nn):
cn_i, cn = p_i, p
# print "closest node found:", cn, cn_i
# print "nodes before check_collision\n", nodes
if check_collision(nn, obsta):
# print ">>> in-valid node, penalizing"
nodes = penalize_nodes(nn, nodes, PEN_DIST_OBS) # penalize nodes closer than PEN_DIST
nodes = organize_nodes(nodes)
pygame.draw.circle(screen, blue, (int(nn[0]),int(nn[1])), node_radius, 2)
# print "nodes after check_collision\n", nodes
# a=raw_input()
else:
# print ">>> valid node, scoring"
# print goals[0]
# print nn
# print "***************"
nn[2] = raw_score(nn, goals[0]) # overwriting raw_score of node who generated it
# nn[2] = avg_score(nn, nodes) # overwriting raw_score of the goal heuristic
pygame.draw.line(screen, white, nodes[0][0:2], nn[0:2])
nodes = insert_node(nn, nodes)
# print "new node list:", nodes
pygame.draw.circle(screen, yellow, (int(nn[0]),int(nn[1])), node_radius, 2)
# print "nodes after check_collision\n", nodes
pygame.display.update()
if check_goal(nn, goals[0]):
print "found path, finishing"
break
# for i in range(NUMNODES):
# rand = random.random()*640.0, random.random()*480.0
# nn = nodes[0]
# for p in nodes:
# if dist(p,rand) < dist(nn,rand):
# nn = p
# newnode = step_from_to(nn,rand)
# nodes.append(newnode)
# pygame.draw.line(screen,white,nn,newnode)
# pygame.display.update()
# #print i, " ", nodes
# for e in pygame.event.get():
# if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
# sys.exit("Leaving because you requested it.")
# if python says run, then we should run
if __name__ == '__main__':
main()
| gpl-2.0 | -7,462,432,645,537,458,000 | 33.498069 | 97 | 0.566536 | false |
wzhfy/spark | python/pyspark/sql/tests/test_pandas_udf_grouped_agg.py | 1 | 20739 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from pyspark.rdd import PythonEvalType
from pyspark.sql import Row
from pyspark.sql.functions import array, explode, col, lit, mean, sum, \
udf, pandas_udf, PandasUDFType
from pyspark.sql.types import *
from pyspark.sql.utils import AnalysisException
from pyspark.testing.sqlutils import ReusedSQLTestCase, have_pandas, have_pyarrow, \
pandas_requirement_message, pyarrow_requirement_message
from pyspark.testing.utils import QuietTest
if have_pandas:
import pandas as pd
from pandas.util.testing import assert_frame_equal
@unittest.skipIf(
not have_pandas or not have_pyarrow,
pandas_requirement_message or pyarrow_requirement_message)
class GroupedAggPandasUDFTests(ReusedSQLTestCase):
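    # Shared fixture: `data` below builds a 100-row frame (ids 0..9, ten 'v'
    # samples per id, plus a constant weight column 'w' == 1.0) that the
    # tests reuse.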
@property
def data(self):
return self.spark.range(10).toDF('id') \
.withColumn("vs", array([lit(i * 1.0) + col('id') for i in range(20, 30)])) \
.withColumn("v", explode(col('vs'))) \
.drop('vs') \
.withColumn('w', lit(1.0))
@property
def python_plus_one(self):
@udf('double')
def plus_one(v):
assert isinstance(v, (int, float))
return v + 1
return plus_one
@property
def pandas_scalar_plus_two(self):
@pandas_udf('double', PandasUDFType.SCALAR)
def plus_two(v):
assert isinstance(v, pd.Series)
return v + 2
return plus_two
@property
def pandas_agg_mean_udf(self):
@pandas_udf('double', PandasUDFType.GROUPED_AGG)
def avg(v):
return v.mean()
return avg
@property
def pandas_agg_sum_udf(self):
@pandas_udf('double', PandasUDFType.GROUPED_AGG)
def sum(v):
return v.sum()
return sum
@property
def pandas_agg_weighted_mean_udf(self):
import numpy as np
@pandas_udf('double', PandasUDFType.GROUPED_AGG)
def weighted_mean(v, w):
return np.average(v, weights=w)
return weighted_mean
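    # Each GROUPED_AGG UDF above reduces the pandas Series of one group to a
    # single scalar, mirroring the built-in aggregates it is compared against.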
def test_manual(self):
df = self.data
sum_udf = self.pandas_agg_sum_udf
mean_udf = self.pandas_agg_mean_udf
mean_arr_udf = pandas_udf(
self.pandas_agg_mean_udf.func,
ArrayType(self.pandas_agg_mean_udf.returnType),
self.pandas_agg_mean_udf.evalType)
result1 = df.groupby('id').agg(
sum_udf(df.v),
mean_udf(df.v),
mean_arr_udf(array(df.v))).sort('id')
expected1 = self.spark.createDataFrame(
[[0, 245.0, 24.5, [24.5]],
[1, 255.0, 25.5, [25.5]],
[2, 265.0, 26.5, [26.5]],
[3, 275.0, 27.5, [27.5]],
[4, 285.0, 28.5, [28.5]],
[5, 295.0, 29.5, [29.5]],
[6, 305.0, 30.5, [30.5]],
[7, 315.0, 31.5, [31.5]],
[8, 325.0, 32.5, [32.5]],
[9, 335.0, 33.5, [33.5]]],
['id', 'sum(v)', 'avg(v)', 'avg(array(v))'])
assert_frame_equal(expected1.toPandas(), result1.toPandas())
def test_basic(self):
df = self.data
weighted_mean_udf = self.pandas_agg_weighted_mean_udf
# Groupby one column and aggregate one UDF with literal
result1 = df.groupby('id').agg(weighted_mean_udf(df.v, lit(1.0))).sort('id')
expected1 = df.groupby('id').agg(mean(df.v).alias('weighted_mean(v, 1.0)')).sort('id')
assert_frame_equal(expected1.toPandas(), result1.toPandas())
# Groupby one expression and aggregate one UDF with literal
result2 = df.groupby((col('id') + 1)).agg(weighted_mean_udf(df.v, lit(1.0)))\
.sort(df.id + 1)
expected2 = df.groupby((col('id') + 1))\
.agg(mean(df.v).alias('weighted_mean(v, 1.0)')).sort(df.id + 1)
assert_frame_equal(expected2.toPandas(), result2.toPandas())
# Groupby one column and aggregate one UDF without literal
result3 = df.groupby('id').agg(weighted_mean_udf(df.v, df.w)).sort('id')
expected3 = df.groupby('id').agg(mean(df.v).alias('weighted_mean(v, w)')).sort('id')
assert_frame_equal(expected3.toPandas(), result3.toPandas())
# Groupby one expression and aggregate one UDF without literal
result4 = df.groupby((col('id') + 1).alias('id'))\
.agg(weighted_mean_udf(df.v, df.w))\
.sort('id')
expected4 = df.groupby((col('id') + 1).alias('id'))\
.agg(mean(df.v).alias('weighted_mean(v, w)'))\
.sort('id')
assert_frame_equal(expected4.toPandas(), result4.toPandas())
def test_unsupported_types(self):
with QuietTest(self.sc):
with self.assertRaisesRegexp(NotImplementedError, 'not supported'):
pandas_udf(
lambda x: x,
ArrayType(ArrayType(TimestampType())),
PandasUDFType.GROUPED_AGG)
with QuietTest(self.sc):
with self.assertRaisesRegexp(NotImplementedError, 'not supported'):
@pandas_udf('mean double, std double', PandasUDFType.GROUPED_AGG)
def mean_and_std_udf(v):
return v.mean(), v.std()
with QuietTest(self.sc):
with self.assertRaisesRegexp(NotImplementedError, 'not supported'):
@pandas_udf(MapType(DoubleType(), DoubleType()), PandasUDFType.GROUPED_AGG)
def mean_and_std_udf(v):
return {v.mean(): v.std()}
def test_alias(self):
df = self.data
mean_udf = self.pandas_agg_mean_udf
result1 = df.groupby('id').agg(mean_udf(df.v).alias('mean_alias'))
expected1 = df.groupby('id').agg(mean(df.v).alias('mean_alias'))
assert_frame_equal(expected1.toPandas(), result1.toPandas())
def test_mixed_sql(self):
"""
Test mixing group aggregate pandas UDF with sql expression.
"""
df = self.data
sum_udf = self.pandas_agg_sum_udf
# Mix group aggregate pandas UDF with sql expression
result1 = (df.groupby('id')
.agg(sum_udf(df.v) + 1)
.sort('id'))
expected1 = (df.groupby('id')
.agg(sum(df.v) + 1)
.sort('id'))
# Mix group aggregate pandas UDF with sql expression (order swapped)
result2 = (df.groupby('id')
.agg(sum_udf(df.v + 1))
.sort('id'))
expected2 = (df.groupby('id')
.agg(sum(df.v + 1))
.sort('id'))
# Wrap group aggregate pandas UDF with two sql expressions
result3 = (df.groupby('id')
.agg(sum_udf(df.v + 1) + 2)
.sort('id'))
expected3 = (df.groupby('id')
.agg(sum(df.v + 1) + 2)
.sort('id'))
assert_frame_equal(expected1.toPandas(), result1.toPandas())
assert_frame_equal(expected2.toPandas(), result2.toPandas())
assert_frame_equal(expected3.toPandas(), result3.toPandas())
def test_mixed_udfs(self):
"""
Test mixing group aggregate pandas UDF with python UDF and scalar pandas UDF.
"""
df = self.data
plus_one = self.python_plus_one
plus_two = self.pandas_scalar_plus_two
sum_udf = self.pandas_agg_sum_udf
# Mix group aggregate pandas UDF and python UDF
result1 = (df.groupby('id')
.agg(plus_one(sum_udf(df.v)))
.sort('id'))
expected1 = (df.groupby('id')
.agg(plus_one(sum(df.v)))
.sort('id'))
# Mix group aggregate pandas UDF and python UDF (order swapped)
result2 = (df.groupby('id')
.agg(sum_udf(plus_one(df.v)))
.sort('id'))
expected2 = (df.groupby('id')
.agg(sum(plus_one(df.v)))
.sort('id'))
# Mix group aggregate pandas UDF and scalar pandas UDF
result3 = (df.groupby('id')
.agg(sum_udf(plus_two(df.v)))
.sort('id'))
expected3 = (df.groupby('id')
.agg(sum(plus_two(df.v)))
.sort('id'))
# Mix group aggregate pandas UDF and scalar pandas UDF (order swapped)
result4 = (df.groupby('id')
.agg(plus_two(sum_udf(df.v)))
.sort('id'))
expected4 = (df.groupby('id')
.agg(plus_two(sum(df.v)))
.sort('id'))
# Wrap group aggregate pandas UDF with two python UDFs and use python UDF in groupby
result5 = (df.groupby(plus_one(df.id))
.agg(plus_one(sum_udf(plus_one(df.v))))
.sort('plus_one(id)'))
expected5 = (df.groupby(plus_one(df.id))
.agg(plus_one(sum(plus_one(df.v))))
.sort('plus_one(id)'))
        # Wrap group aggregate pandas UDF with two scalar pandas UDFs and use
        # a scalar pandas UDF in the groupby
result6 = (df.groupby(plus_two(df.id))
.agg(plus_two(sum_udf(plus_two(df.v))))
.sort('plus_two(id)'))
expected6 = (df.groupby(plus_two(df.id))
.agg(plus_two(sum(plus_two(df.v))))
.sort('plus_two(id)'))
assert_frame_equal(expected1.toPandas(), result1.toPandas())
assert_frame_equal(expected2.toPandas(), result2.toPandas())
assert_frame_equal(expected3.toPandas(), result3.toPandas())
assert_frame_equal(expected4.toPandas(), result4.toPandas())
assert_frame_equal(expected5.toPandas(), result5.toPandas())
assert_frame_equal(expected6.toPandas(), result6.toPandas())
def test_multiple_udfs(self):
"""
Test multiple group aggregate pandas UDFs in one agg function.
"""
df = self.data
mean_udf = self.pandas_agg_mean_udf
sum_udf = self.pandas_agg_sum_udf
weighted_mean_udf = self.pandas_agg_weighted_mean_udf
result1 = (df.groupBy('id')
.agg(mean_udf(df.v),
sum_udf(df.v),
weighted_mean_udf(df.v, df.w))
.sort('id')
.toPandas())
expected1 = (df.groupBy('id')
.agg(mean(df.v),
sum(df.v),
mean(df.v).alias('weighted_mean(v, w)'))
.sort('id')
.toPandas())
assert_frame_equal(expected1, result1)
def test_complex_groupby(self):
df = self.data
sum_udf = self.pandas_agg_sum_udf
plus_one = self.python_plus_one
plus_two = self.pandas_scalar_plus_two
# groupby one expression
result1 = df.groupby(df.v % 2).agg(sum_udf(df.v))
expected1 = df.groupby(df.v % 2).agg(sum(df.v))
# empty groupby
result2 = df.groupby().agg(sum_udf(df.v))
expected2 = df.groupby().agg(sum(df.v))
# groupby one column and one sql expression
result3 = df.groupby(df.id, df.v % 2).agg(sum_udf(df.v)).orderBy(df.id, df.v % 2)
expected3 = df.groupby(df.id, df.v % 2).agg(sum(df.v)).orderBy(df.id, df.v % 2)
# groupby one python UDF
result4 = df.groupby(plus_one(df.id)).agg(sum_udf(df.v))
expected4 = df.groupby(plus_one(df.id)).agg(sum(df.v))
# groupby one scalar pandas UDF
result5 = df.groupby(plus_two(df.id)).agg(sum_udf(df.v)).sort('sum(v)')
expected5 = df.groupby(plus_two(df.id)).agg(sum(df.v)).sort('sum(v)')
# groupby one expression and one python UDF
result6 = df.groupby(df.v % 2, plus_one(df.id)).agg(sum_udf(df.v))
expected6 = df.groupby(df.v % 2, plus_one(df.id)).agg(sum(df.v))
# groupby one expression and one scalar pandas UDF
result7 = (df.groupby(df.v % 2, plus_two(df.id))
.agg(sum_udf(df.v)).sort(['sum(v)', 'plus_two(id)']))
expected7 = (df.groupby(df.v % 2, plus_two(df.id))
.agg(sum(df.v)).sort(['sum(v)', 'plus_two(id)']))
assert_frame_equal(expected1.toPandas(), result1.toPandas())
assert_frame_equal(expected2.toPandas(), result2.toPandas())
assert_frame_equal(expected3.toPandas(), result3.toPandas())
assert_frame_equal(expected4.toPandas(), result4.toPandas())
assert_frame_equal(expected5.toPandas(), result5.toPandas())
assert_frame_equal(expected6.toPandas(), result6.toPandas())
assert_frame_equal(expected7.toPandas(), result7.toPandas())
def test_complex_expressions(self):
df = self.data
plus_one = self.python_plus_one
plus_two = self.pandas_scalar_plus_two
sum_udf = self.pandas_agg_sum_udf
# Test complex expressions with sql expression, python UDF and
# group aggregate pandas UDF
result1 = (df.withColumn('v1', plus_one(df.v))
.withColumn('v2', df.v + 2)
.groupby(df.id, df.v % 2)
.agg(sum_udf(col('v')),
sum_udf(col('v1') + 3),
sum_udf(col('v2')) + 5,
plus_one(sum_udf(col('v1'))),
sum_udf(plus_one(col('v2'))))
.sort(['id', '(v % 2)'])
.toPandas().sort_values(by=['id', '(v % 2)']))
expected1 = (df.withColumn('v1', df.v + 1)
.withColumn('v2', df.v + 2)
.groupby(df.id, df.v % 2)
.agg(sum(col('v')),
sum(col('v1') + 3),
sum(col('v2')) + 5,
plus_one(sum(col('v1'))),
sum(plus_one(col('v2'))))
.sort(['id', '(v % 2)'])
.toPandas().sort_values(by=['id', '(v % 2)']))
        # Test complex expressions with sql expression, scalar pandas UDF and
        # group aggregate pandas UDF
result2 = (df.withColumn('v1', plus_one(df.v))
.withColumn('v2', df.v + 2)
.groupby(df.id, df.v % 2)
.agg(sum_udf(col('v')),
sum_udf(col('v1') + 3),
sum_udf(col('v2')) + 5,
plus_two(sum_udf(col('v1'))),
sum_udf(plus_two(col('v2'))))
.sort(['id', '(v % 2)'])
.toPandas().sort_values(by=['id', '(v % 2)']))
expected2 = (df.withColumn('v1', df.v + 1)
.withColumn('v2', df.v + 2)
.groupby(df.id, df.v % 2)
.agg(sum(col('v')),
sum(col('v1') + 3),
sum(col('v2')) + 5,
plus_two(sum(col('v1'))),
sum(plus_two(col('v2'))))
.sort(['id', '(v % 2)'])
.toPandas().sort_values(by=['id', '(v % 2)']))
# Test sequential groupby aggregate
result3 = (df.groupby('id')
.agg(sum_udf(df.v).alias('v'))
.groupby('id')
.agg(sum_udf(col('v')))
.sort('id')
.toPandas())
expected3 = (df.groupby('id')
.agg(sum(df.v).alias('v'))
.groupby('id')
.agg(sum(col('v')))
.sort('id')
.toPandas())
assert_frame_equal(expected1, result1)
assert_frame_equal(expected2, result2)
assert_frame_equal(expected3, result3)
def test_retain_group_columns(self):
with self.sql_conf({"spark.sql.retainGroupColumns": False}):
df = self.data
sum_udf = self.pandas_agg_sum_udf
result1 = df.groupby(df.id).agg(sum_udf(df.v))
expected1 = df.groupby(df.id).agg(sum(df.v))
assert_frame_equal(expected1.toPandas(), result1.toPandas())
def test_array_type(self):
df = self.data
array_udf = pandas_udf(lambda x: [1.0, 2.0], 'array<double>', PandasUDFType.GROUPED_AGG)
result1 = df.groupby('id').agg(array_udf(df['v']).alias('v2'))
        self.assertEqual(result1.first()['v2'], [1.0, 2.0])
def test_invalid_args(self):
df = self.data
plus_one = self.python_plus_one
mean_udf = self.pandas_agg_mean_udf
with QuietTest(self.sc):
with self.assertRaisesRegexp(
AnalysisException,
'nor.*aggregate function'):
df.groupby(df.id).agg(plus_one(df.v)).collect()
with QuietTest(self.sc):
with self.assertRaisesRegexp(
AnalysisException,
'aggregate function.*argument.*aggregate function'):
df.groupby(df.id).agg(mean_udf(mean_udf(df.v))).collect()
with QuietTest(self.sc):
with self.assertRaisesRegexp(
AnalysisException,
'mixture.*aggregate function.*group aggregate pandas UDF'):
df.groupby(df.id).agg(mean_udf(df.v), mean(df.v)).collect()
def test_register_vectorized_udf_basic(self):
sum_pandas_udf = pandas_udf(
lambda v: v.sum(), "integer", PythonEvalType.SQL_GROUPED_AGG_PANDAS_UDF)
self.assertEqual(sum_pandas_udf.evalType, PythonEvalType.SQL_GROUPED_AGG_PANDAS_UDF)
group_agg_pandas_udf = self.spark.udf.register("sum_pandas_udf", sum_pandas_udf)
self.assertEqual(group_agg_pandas_udf.evalType, PythonEvalType.SQL_GROUPED_AGG_PANDAS_UDF)
q = "SELECT sum_pandas_udf(v1) FROM VALUES (3, 0), (2, 0), (1, 1) tbl(v1, v2) GROUP BY v2"
actual = sorted(map(lambda r: r[0], self.spark.sql(q).collect()))
expected = [1, 5]
self.assertEqual(actual, expected)
def test_grouped_with_empty_partition(self):
data = [Row(id=1, x=2), Row(id=1, x=3), Row(id=2, x=4)]
        expected = [Row(id=1, sum=5), Row(id=2, sum=4)]
num_parts = len(data) + 1
df = self.spark.createDataFrame(self.sc.parallelize(data, numSlices=num_parts))
f = pandas_udf(lambda x: x.sum(),
'int', PandasUDFType.GROUPED_AGG)
result = df.groupBy('id').agg(f(df['x']).alias('sum')).collect()
self.assertEqual(result, expected)
def test_grouped_without_group_by_clause(self):
@pandas_udf('double', PandasUDFType.GROUPED_AGG)
def max_udf(v):
return v.max()
df = self.spark.range(0, 100)
self.spark.udf.register('max_udf', max_udf)
with self.tempView("table"):
df.createTempView('table')
agg1 = df.agg(max_udf(df['id']))
agg2 = self.spark.sql("select max_udf(id) from table")
assert_frame_equal(agg1.toPandas(), agg2.toPandas())
def test_no_predicate_pushdown_through(self):
# SPARK-30921: We should not pushdown predicates of PythonUDFs through Aggregate.
import numpy as np
@pandas_udf('float', PandasUDFType.GROUPED_AGG)
def mean(x):
return np.mean(x)
df = self.spark.createDataFrame([
Row(id=1, foo=42), Row(id=2, foo=1), Row(id=2, foo=2)
])
agg = df.groupBy('id').agg(mean('foo').alias("mean"))
filtered = agg.filter(agg['mean'] > 40.0)
assert(filtered.collect()[0]["mean"] == 42.0)
if __name__ == "__main__":
from pyspark.sql.tests.test_pandas_udf_grouped_agg import * # noqa: F401
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 | -4,292,672,378,173,092,400 | 38.882692 | 98 | 0.538695 | false |
prateeksan/python-design-patterns | behavioural/chain_of_responsibility.py | 1 | 3358 | """ The Chain of Responsibility Pattern
Notes:
The Chain of Responsibility pattern allows the client programmer to dynamically
create a recursive chain of objects - each of which tries to fulfill a
'responsibility' (usually represented by a method call). If an object in the
chain is unable to fulfill it, the request propagates to the next level of the
chain until it can be fulfilled. This chain is usually implemented as a linked
list (but it can be implemented using other iterable structures).
In the following example, we simulate a service that searches for a job
candidate from several pools of candidates. The pools are categorized by
geographical clusters (local/regional/global) and we assume that the user of
this service wants to find the nearest candidate (in the smallest cluster)
that meets all requirements. The chain of responsibility will thus be a linked
list of the pools which the user will recursively check (smallest to largest)
in order to find a good candidate.
"""
class AbstractPool:
"""The interface for the pool classes. All pools inherit from this."""
candidates = []
def __init__(self, successor_pool=None):
"""Note how each pool object can store a pointer to a successor_pool.
If no such pointer is assigned, we assume that is the last pool in the
chain.
"""
self._successor = successor_pool
def get_match(self, params):
"""If a match is found in the pool of candidates, the candidate is
returned, else the responsibility is propagated to the next pool in the
chain.
"""
match = self._find(params)
if match:
return match
elif self._successor:
return self._successor.get_match(params)
def _find(self, params):
"""Returns the first matching candidate in the pool if a match is found.
The exact implementation of this method is irrelevant to the concept of
the pattern. It may also be implemented differently for each pool.
"""
for candidate in self.__class__.candidates:
            if all(item in candidate.items() for item in params.items()):
print("> Match found in {}:".format(self.__class__.__name__))
return candidate
print("> No match found in {}.".format(self.__class__.__name__))
class LocalPool(AbstractPool):
candidates = [
{"id": 12, "type": "developer", "level": "intermediate"},
{"id": 21, "type": "analyst", "level": "junior"}
]
class RegionalPool(AbstractPool):
candidates = [
{"id": 123, "type": "project_manager", "level": "intermediate"},
{"id": 321, "type": "designer", "level": "intermediate"}
]
class GlobalPool(AbstractPool):
candidates = [
# The following candidate is the only one that matches the needs.
{"id": 1234, "type": "developer", "level": "senior"},
{"id": 4321, "type": "designer", "level": "senior"}
]
if __name__ == "__main__":
# Setting up recursive propagation in this order: local > regional > global.
global_pool = GlobalPool()
regional_pool = RegionalPool(global_pool)
local_pool = LocalPool(regional_pool)
print("Searching for a senior developer in the pools chain:")
print(local_pool.get_match({"type": "developer", "level": "senior"})) | mit | -5,352,763,461,890,104,000 | 36.741573 | 80 | 0.662001 | false |
bq/web2board | src/platformio/util.py | 1 | 13808 | # Copyright 2014-2015 Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import functools
import json
import os
import re
import subprocess
import sys
import tempfile
from glob import glob
from os.path import (abspath, basename, dirname, expanduser, isdir, isfile,
join, realpath)
from platform import system, uname
from threading import Thread
from libs import utils
from libs.PathsManager import PathsManager
from platformio import __apiurl__, __version__, exception
# pylint: disable=wrong-import-order
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
class AsyncPipe(Thread):
def __init__(self, outcallback=None):
Thread.__init__(self)
self.outcallback = outcallback
self._fd_read, self._fd_write = os.pipe()
self._pipe_reader = os.fdopen(self._fd_read)
self._buffer = []
self.start()
def get_buffer(self):
return self._buffer
def fileno(self):
return self._fd_write
def run(self):
for line in iter(self._pipe_reader.readline, ""):
line = line.strip()
self._buffer.append(line)
if self.outcallback:
self.outcallback(line)
else:
print line
self._pipe_reader.close()
def close(self):
os.close(self._fd_write)
self.join()
class cd(object):
def __init__(self, new_path):
self.new_path = new_path
self.prev_path = os.getcwd()
def __enter__(self):
os.chdir(self.new_path)
def __exit__(self, etype, value, traceback):
os.chdir(self.prev_path)
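# Illustrative use (not part of the original module):
#
#     with cd("/tmp"):
#         ...  # runs with the working directory switched, restored on exit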
class memoized(object):
'''
Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
'''
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
else:
value = self.func(*args)
self.cache[args] = value
return value
def __repr__(self):
'''Return the function's docstring.'''
return self.func.__doc__
def __get__(self, obj, objtype):
'''Support instance methods.'''
return functools.partial(self.__call__, obj)
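# Illustrative use (not part of the original module):
#
#     @memoized
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)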
def singleton(cls):
""" From PEP-318 http://www.python.org/dev/peps/pep-0318/#examples """
_instances = {}
def get_instance(*args, **kwargs):
if cls not in _instances:
_instances[cls] = cls(*args, **kwargs)
return _instances[cls]
return get_instance
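# Illustrative use (not part of the original module): decorating a class makes
# every call site share one instance:
#
#     @singleton
#     class Settings(object):
#         pass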
def get_systype():
data = uname()
type_ = data[0].lower()
arch = data[4].lower() if data[4] else ""
return "%s_%s" % (type_, arch) if arch else type_
def pioversion_to_intstr():
vermatch = re.match(r"^([\d\.]+)", __version__)
assert vermatch
return [int(i) for i in vermatch.group(1).split(".")[:3]]
def _get_projconf_option_dir(name, default=None):
_env_name = "PLATFORMIO_%s" % name.upper()
if _env_name in os.environ:
return os.getenv(_env_name)
try:
config = get_project_config()
if (config.has_section("platformio") and
config.has_option("platformio", name)):
option_dir = config.get("platformio", name)
if option_dir.startswith("~"):
option_dir = expanduser(option_dir)
return abspath(option_dir)
except exception.NotPlatformProject:
pass
return default
def get_home_dir():
return PathsManager.PLATFORMIO_PACKAGES_PATH # [JORGE_GARCIA] modified optimization
home_dir = _get_projconf_option_dir(
"home_dir",
join(expanduser("~"), ".platformio")
)
if not isdir(home_dir):
os.makedirs(home_dir)
assert isdir(home_dir)
return home_dir
def get_lib_dir():
return _get_projconf_option_dir(
"lib_dir",
join(get_home_dir(), "lib")
)
def get_source_dir():
if utils.is_mac():
return PathsManager.RES_PLATFORMIO_PATH
else:
return dirname(realpath(__file__))
def get_project_dir():
return os.getcwd()
def get_projectsrc_dir():
return _get_projconf_option_dir(
"src_dir",
join(get_project_dir(), "src")
)
def get_projectlib_dir():
return join(get_project_dir(), "lib")
def get_pioenvs_dir():
return _get_projconf_option_dir(
"envs_dir",
join(get_project_dir(), ".pioenvs")
)
def get_project_config():
path = join(get_project_dir(), "platformio.ini")
if not isfile(path):
raise exception.NotPlatformProject(get_project_dir())
cp = ConfigParser()
cp.read(path)
return cp
def change_filemtime(path, time):
os.utime(path, (time, time))
def is_ci():
return os.getenv("CI", "").lower() == "true"
def exec_command(*args, **kwargs):
result = {
"out": None,
"err": None,
"returncode": None
}
tempPath = tempfile.gettempdir() + os.sep + "w2bInAuxiliary.w2b" # [JORGE_GARCIA] modified for non console in windows
with open(tempPath, "w"):
pass
default = dict(
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=open(tempPath, "r"), # [JORGE_GARCIA] modified for non console in windows
shell=system() == "Windows"
)
default.update(kwargs)
kwargs = default
p = subprocess.Popen(*args, **kwargs)
try:
result['out'], result['err'] = p.communicate()
result['returncode'] = p.returncode
except KeyboardInterrupt:
raise exception.AbortedByUser()
finally:
for s in ("stdout", "stderr"):
if isinstance(kwargs[s], AsyncPipe):
kwargs[s].close()
for s in ("stdout", "stderr"):
if isinstance(kwargs[s], AsyncPipe):
result[s[3:]] = "\n".join(kwargs[s].get_buffer())
for k, v in result.iteritems():
if v and isinstance(v, basestring):
            result[k] = v.strip()
return result
def get_serialports():
try:
from serial.tools.list_ports import comports
except ImportError:
raise exception.GetSerialPortsError(os.name)
result = [{"port": p, "description": d, "hwid": h}
for p, d, h in comports() if p]
# fix for PySerial
if not result and system() == "Darwin":
for p in glob("/dev/tty.*"):
result.append({"port": p, "description": "", "hwid": ""})
return result
def get_logicaldisks():
disks = []
if system() == "Windows":
result = exec_command(
["wmic", "logicaldisk", "get", "name,VolumeName"]).get("out")
disknamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
for line in result.split("\n"):
match = disknamere.match(line.strip())
if not match:
continue
disks.append({"disk": match.group(1), "name": match.group(2)})
else:
result = exec_command(["df"]).get("out")
disknamere = re.compile(r"\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
for line in result.split("\n"):
match = disknamere.search(line.strip())
if not match:
continue
disks.append({"disk": match.group(1),
"name": basename(match.group(1))})
return disks
def get_request_defheaders():
import requests
return {"User-Agent": "PlatformIO/%s CI/%d %s" % (
__version__, int(is_ci()), requests.utils.default_user_agent()
)}
def get_api_result(path, params=None, data=None):
import requests
result = None
r = None
try:
if data:
r = requests.post(__apiurl__ + path, params=params, data=data,
headers=get_request_defheaders())
else:
r = requests.get(__apiurl__ + path, params=params,
headers=get_request_defheaders())
result = r.json()
r.raise_for_status()
except requests.exceptions.HTTPError as e:
if result and "errors" in result:
raise exception.APIRequestError(result['errors'][0]['title'])
else:
raise exception.APIRequestError(e)
except requests.exceptions.ConnectionError:
raise exception.APIRequestError(
"Could not connect to PlatformIO Registry Service")
except ValueError:
raise exception.APIRequestError(
"Invalid response: %s" % r.text.encode("utf-8"))
finally:
if r:
r.close()
return result
def test_scons():
try:
r = exec_command(["scons", "--version"])
if "ImportError: No module named SCons.Script" in r['err']:
_PYTHONPATH = []
for p in sys.path:
if not p.endswith("-packages"):
continue
for item in glob(join(p, "scons*")):
if isdir(join(item, "SCons")) and item not in sys.path:
_PYTHONPATH.append(item)
sys.path.insert(0, item)
if _PYTHONPATH:
_PYTHONPATH = str(os.pathsep).join(_PYTHONPATH)
if os.getenv("PYTHONPATH"):
os.environ['PYTHONPATH'] += os.pathsep + _PYTHONPATH
else:
os.environ['PYTHONPATH'] = _PYTHONPATH
r = exec_command(["scons", "--version"])
assert r['returncode'] == 0
return True
except (OSError, AssertionError):
for p in sys.path:
try:
r = exec_command([join(p, "scons"), "--version"])
assert r['returncode'] == 0
os.environ['PATH'] += os.pathsep + p
return True
except (OSError, AssertionError):
pass
return False
def install_scons():
r = exec_command(["pip", "install", "-U", "scons"])
if r['returncode'] != 0:
r = exec_command(["pip", "install", "--egg", "scons",
'--install-option="--no-install-man"'])
return r['returncode'] == 0
def scons_in_pip():
r = exec_command(["pip", "list"])
if r['returncode'] != 0:
return False
return "scons (" in r['out'].lower()
@memoized
def _lookup_boards():
boards = {}
bdirs = [join(get_source_dir(), "boards")]
if isdir(join(get_home_dir(), "boards")):
bdirs.append(join(get_home_dir(), "boards"))
for bdir in bdirs:
for json_file in sorted(os.listdir(bdir)):
if not json_file.endswith(".json"):
continue
with open(join(bdir, json_file)) as f:
boards.update(json.load(f))
return boards
def get_boards(type_=None):
boards = _lookup_boards()
if type_ is None:
return boards
else:
if type_ not in boards:
raise exception.UnknownBoard(type_)
return boards[type_]
@memoized
def _lookup_frameworks():
frameworks = {}
frameworks_path = join(
get_source_dir(), "builder", "scripts", "frameworks")
frameworks_list = [f[:-3] for f in os.listdir(frameworks_path)
if not f.startswith("__") and f.endswith(".py")]
for _type in frameworks_list:
script_path = join(frameworks_path, "%s.py" % _type)
with open(script_path) as f:
fcontent = f.read()
assert '"""' in fcontent
_doc_start = fcontent.index('"""') + 3
fdoc = fcontent[
_doc_start:fcontent.index('"""', _doc_start)].strip()
doclines = [l.strip() for l in fdoc.splitlines() if l.strip()]
frameworks[_type] = {
"name": doclines[0],
"description": " ".join(doclines[1:-1]),
"url": doclines[-1],
"script": script_path
}
return frameworks
def get_frameworks(type_=None):
frameworks = _lookup_frameworks()
if type_ is None:
return frameworks
else:
if type_ not in frameworks:
raise exception.UnknownFramework(type_)
return frameworks[type_]
def where_is_program(program, envpath=None):
env = os.environ
if envpath:
env['PATH'] = envpath
# try OS's built-in commands
try:
result = exec_command(
["where" if "windows" in get_systype() else "which", program],
env=env
)
if result['returncode'] == 0 and isfile(result['out'].strip()):
return result['out'].strip()
except OSError:
pass
# look up in $PATH
for bin_dir in env.get("PATH", "").split(os.pathsep):
if isfile(join(bin_dir, program)):
return join(bin_dir, program)
elif isfile(join(bin_dir, "%s.exe" % program)):
return join(bin_dir, "%s.exe" % program)
return program
| lgpl-3.0 | -2,197,265,980,824,614,700 | 27.528926 | 121 | 0.568076 | false |
ewiger/runstat | python/runstat.py | 1 | 3779 | '''
Implementation of running variance/standard deviation.
The MIT License (MIT)
Copyright (c) 2015 Yauhen Yakimovich <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from math import sqrt
from decimal import Decimal
__version__ = '1.0.0'
class RunStat(object):
'''
Class for keeping the running statistics of a continuously sampled one-
or multi-dimensional process/signal.
'''
def __init__(self, dtype=Decimal):
self.dtype = dtype
# (running) mean
self.m = dtype(0)
# counter of updates
self.n = 0
# (running) sum of the recurrence form:
# M(2,n) = M(2,n-1) + (x - mean(x_n))*(x - mean(x_{n-1}))
self.M2 = dtype(0)
# max/min
self.max_value = dtype(0)
self.min_value = dtype(0)
# weight of items seen
# TODO: implement this
self.total_weight = dtype(1)
@property
def mean(self):
return self.m
@property
def var(self):
        # Sample variance (Welford): M2 / (n - 1) once at least two samples
        # have been seen; population form for the single-sample case.
        if self.n > 1:
            return self.M2 / (self.n - 1)
        return self.M2 / self.n
@property
def std(self):
return self.dtype(sqrt(self.var))
@property
def min(self):
return self.min_value
@property
def max(self):
return self.max_value
@property
def count(self):
return self.n
def reset(self):
self.n = 0
self.is_started = False
def update(self, value, weight=None):
'''
        Update running stats; the weight defaults to 1.
'''
# Initialize.
value = self.dtype(value)
self.n = self.n + 1
        if self.n <= 1:
            # First update: seed every running statistic from the first sample.
            self.m = value
            self.M2 = self.dtype(0)
            self.total_weight = self.dtype(0)
            self.n = self.dtype(1)
            self.max_value = value
            self.min_value = value
            return
# Update max/min.
if value > self.max_value:
self.max_value = value
elif value < self.min_value:
self.min_value = value
# No update.
delta = value - self.m
if delta == 0:
return
# Update running moments.
self.m = self.m + delta / self.n
if weight is None:
# Ignore weight
if self.n > 1:
self.M2 = self.M2 + delta * (value - self.m)
return
# Weight-aware implementation.
weight = self.dtype(weight)
next_weight = self.total_weight + weight
R = self.dtype(delta * (weight / next_weight))
self.m = self.m + R
if self.total_weight > 0:
self.M2 = self.M2 + self.total_weight * delta * R
self.total_weight = next_weight
def __call__(self, *args, **kwds):
self.update(*args, **kwds)
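if __name__ == '__main__':
    # Minimal usage sketch, added for illustration (not part of the original
    # module): feed samples one at a time and read the statistics at any point.
    rs = RunStat(dtype=float)
    for sample in (2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0):
        rs.update(sample)
    print("mean=%s var=%s std=%s min=%s max=%s n=%s" % (
        rs.mean, rs.var, rs.std, rs.min, rs.max, rs.count))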
| mit | -5,212,956,734,936,615,000 | 27.413534 | 78 | 0.604922 | false |
Rezzie/Batcher | generators/g_cycle.py | 1 | 2394 | #!/usr/bin/env python
# Copyright (c) 2011, The University of York
# All rights reserved.
# Author(s):
# James Arnold <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the The University of York nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF YORK BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from generator import Generator
class cycle(Generator):
def __init__(self, choices, initial_offset=0):
assert len(choices) > 0
        assert initial_offset < len(choices)
self.__choices = choices
self.__offset = initial_offset
def Generate(self):
"""Return the next item in the list, wrapping around if necessary."""
while True:
yield self.__choices[self.__offset]
self.__offset += 1
if self.__offset >= len(self.__choices):
self.__offset = 0
if __name__ == "__main__":
from generator import PrintExamples
options = {'choices': ["James", "Ralph"],
'initial_offset': 0}
gen = cycle(**options)
PrintExamples(gen)
| bsd-3-clause | 8,469,629,835,516,487,000 | 39.576271 | 80 | 0.702172 | false |
landscapeio/prospector | prospector2/tools/pylint/linter.py | 1 | 2025 | from __future__ import absolute_import
from pylint.__pkginfo__ import numversion as PYLINT_VERSION
if PYLINT_VERSION >= (1, 5):
from pylint.config import OptionsManagerMixIn
from pylint.utils import _splitstrip
else:
from logilab.common.configuration import OptionsManagerMixIn
from pylint.lint import PyLinter
class ProspectorLinter(PyLinter): # pylint: disable=too-many-ancestors,too-many-public-methods
def __init__(self, found_files, *args, **kwargs):
self._files = found_files
# set up the standard PyLint linter
PyLinter.__init__(self, *args, **kwargs)
def config_from_file(self, config_file=None):
"""Will return `True` if plugins have been loaded. For pylint>=1.5. Else `False`."""
if PYLINT_VERSION >= (1, 5):
if PYLINT_VERSION >= (2, 0):
self.read_config_file(config_file)
else:
self.read_config_file(config_file, quiet=True)
if self.cfgfile_parser.has_option('MASTER', 'load-plugins'):
# pylint: disable=protected-access
plugins = _splitstrip(self.cfgfile_parser.get('MASTER', 'load-plugins'))
self.load_plugin_modules(plugins)
self.load_config_file()
return True
self.load_file_configuration(config_file)
return False
def reset_options(self):
# for example, we want to re-initialise the OptionsManagerMixin
# to supress the config error warning
# pylint: disable=non-parent-init-called
if PYLINT_VERSION >= (2, 0):
OptionsManagerMixIn.__init__(self, usage=PyLinter.__doc__)
else:
OptionsManagerMixIn.__init__(self, usage=PyLinter.__doc__, quiet=True)
def expand_files(self, modules):
expanded = PyLinter.expand_files(self, modules)
filtered = []
for module in expanded:
if self._files.check_module(module['path']):
filtered.append(module)
return filtered
| gpl-2.0 | 6,011,584,028,239,311,000 | 37.942308 | 95 | 0.624691 | false |
jettify/aiogibson | tests/test_parser.py | 1 | 3928 | import unittest
from aiogibson import errors
from aiogibson.parser import Reader, encode_command
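# Wire format exercised by these tests (as far as the fixtures show): a 2-byte
# reply code, a 1-byte encoding flag (0x00 raw bytes, 0x02 native number),
# a 4-byte little-endian payload length, then the payload itself.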
class ParserTest(unittest.TestCase):
def test_not_found(self):
data = b'\x01\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertEqual(obj, None)
def test_val(self):
data = b'\x06\x00\x00\x03\x00\x00\x00bar'
parser = Reader()
parser.feed(data)
resp = parser.gets()
self.assertEqual(resp, b'bar')
def test_kv(self):
data = b'\x07\x00\x007\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00' \
b'foo1\x00\x04\x00\x00\x00bar1\x04\x00\x00\x00' \
b'foo2\x00\x04\x00\x00\x00bar2\x04\x00\x00\x00f' \
b'oo3\x00\x04\x00\x00\x00bar3'
parser = Reader()
parser.feed(data)
obj = parser.gets()
expected = [b'foo1', b'bar1', b'foo2', b'bar2', b'foo3', b'bar3']
self.assertEqual(obj, expected)
def test_chunked_read(self):
parser = Reader()
data = [b'\x06\x00', b'\x00', b'\x03', b'\x00\x00', b'\x00', b'bar']
parser.feed(b'')
for i, b in enumerate(data):
parser.feed(b)
obj = parser.gets()
if i == len(data)-1:
self.assertEqual(obj, b'bar')
else:
self.assertEqual(obj, False)
data2 = [b'\x06\x00', b'\x00', b'\x03', b'\x00\x00', b'\x00', b'zap']
for i, b in enumerate(data2):
parser.feed(b)
obj = parser.gets()
if i == len(data2) - 1:
self.assertEqual(obj, b'zap')
else:
self.assertEqual(obj, False)
def test_data_error(self):
# case where we do not know how to unpack gibson data type
data = b'\x06\x00\x05\x03\x00\x00\x00bar'
parser = Reader()
parser.feed(data)
with self.assertRaises(errors.ProtocolError):
parser.gets()
def test_err_generic(self):
data = b'\x00\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertIsInstance(obj, errors.GibsonError)
def test_err_nan(self):
data = b'\x02\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertIsInstance(obj, errors.ExpectedANumber)
def test_err_mem(self):
data = b'\x03\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertIsInstance(obj, errors.MemoryLimitError)
def test_err_locked(self):
data = b'\x04\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertIsInstance(obj, errors.KeyLockedError)
def test_ok(self):
data = b'\x05\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertEqual(obj, True)
def test_protocol_error(self):
data = b'\x09\x00\x00\x01\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
with self.assertRaises(errors.ProtocolError):
parser.gets()
def test_gb_encoding(self):
data = b'\x06\x00\x02\x08\x00\x00\x00M\x00\x00\x00\x00\x00\x00\x00'
parser = Reader()
parser.feed(data)
obj = parser.gets()
self.assertEqual(obj, 77)
def test_encode_command_set(self):
res = encode_command(b'set', 3600, 'foo', 3.14)
self.assertEqual(res, b'\x0f\x00\x00\x00\x01\x003600 foo 3.14')
res = encode_command(b'set', 3600, b'foo', bytearray(b'Q'))
self.assertEqual(res, b'\x0c\x00\x00\x00\x01\x003600 foo Q')
with self.assertRaises(TypeError):
encode_command(b'set', b'3600', b'foo', object())
| mit | -3,434,366,863,707,969,000 | 32.008403 | 77 | 0.562373 | false |
mekkablue/Glyphs-Scripts | Color Fonts/Merge Suffixed Glyphs into Color Layers.py | 1 | 10746 | #MenuTitle: Merge Suffixed Glyphs into Color Layers
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Takes the master layer of suffixed glyphs (e.g., x.shadow, x.body, x.front) and turns them in a specified order into CPAL Color layers of the unsuffixed glyph (e.g., Color 1, Color 0, Color 2 of x).
"""
import vanilla
from copy import copy as copy
from AppKit import NSFont
class MergeSuffixedGlyphsIntoColorLayers( object ):
def __init__( self ):
# Window 'self.w':
windowWidth = 400
windowHeight = 300
windowWidthResize = 1000 # user can resize width by this value
windowHeightResize = 1000 # user can resize height by this value
self.w = vanilla.FloatingWindow(
( windowWidth, windowHeight ), # default window size
"Merge Suffixed Glyphs into Color Layers", # window title
minSize = ( windowWidth, windowHeight ), # minimum size (for resizing)
maxSize = ( windowWidth + windowWidthResize, windowHeight + windowHeightResize ), # maximum size (for resizing)
autosaveName = "com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.mainwindow" # stores last window position and size
)
# UI elements:
linePos, inset, lineHeight = 12, 15, 22
self.w.descriptionText = vanilla.TextBox( (inset, linePos+2, -inset, 14), "Merge suffixed glyphs into the following color indexes:", sizeStyle='small', selectable=True )
linePos += lineHeight
self.w.indexToSuffix = vanilla.TextEditor( (2, linePos, -2, -110), "# Syntax: CPAL index = glyph name suffix\n# list them in chronological order (bottom-up)\n# use hashtags for comments\n0=.shadow\n2=.body\n1=.front", callback=self.SavePreferences, checksSpelling=False )
#self.w.indexToSuffix.getNSTextEditor().setToolTip_("Syntax: colorindex=.suffix, use hashtags for comments. List them in chronological order (bottom-up). Example:\n0=.shadow\n2=.body\n1=.front")
self.w.indexToSuffix.getNSScrollView().setHasVerticalScroller_(1)
self.w.indexToSuffix.getNSScrollView().setHasHorizontalScroller_(1)
self.w.indexToSuffix.getNSScrollView().setRulersVisible_(0)
legibleFont = NSFont.legibileFontOfSize_(NSFont.systemFontSize())
textView = self.w.indexToSuffix.getNSTextView()
textView.setFont_(legibleFont)
textView.setHorizontallyResizable_(1)
textView.setVerticallyResizable_(1)
textView.setAutomaticDataDetectionEnabled_(1)
textView.setAutomaticLinkDetectionEnabled_(1)
textView.setDisplaysLinkToolTips_(1)
textSize = textView.minSize()
textSize.width = 1000
textView.setMinSize_(textSize)
linePos = -105
self.w.disableSuffixedGlyphs = vanilla.CheckBox( (inset, linePos-1, -inset, 20), "Deactivate export for glyphs with listed suffixes", value=True, callback=self.SavePreferences, sizeStyle='small' )
linePos += lineHeight
self.w.deletePreexistingColorLayers = vanilla.CheckBox( (inset, linePos-1, -inset, 20), "Delete preexisting Color layers in target glyphs", value=True, callback=self.SavePreferences, sizeStyle='small' )
linePos += lineHeight
self.w.processCompleteFont = vanilla.CheckBox( (inset, linePos-1, -inset, 20), "Process complete font (otherwise only add into selected glyphs)", value=False, callback=self.SavePreferences, sizeStyle='small' )
linePos += lineHeight
# Run Button:
self.w.runButton = vanilla.Button( (-80-inset, -20-inset, -inset, -inset), "Merge", sizeStyle='regular', callback=self.MergeSuffixedGlyphsIntoColorLayersMain )
# self.w.setDefaultButton( self.w.runButton )
# Load Settings:
if not self.LoadPreferences():
print("Note: 'Merge Suffixed Glyphs into Color Layers' could not load preferences. Will resort to defaults")
# Open window and focus on it:
self.w.open()
self.w.makeKey()
def SavePreferences( self, sender=None ):
try:
# write current settings into prefs:
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix"] = self.w.indexToSuffix.get()
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs"] = self.w.disableSuffixedGlyphs.get()
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers"] = self.w.deletePreexistingColorLayers.get()
Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont"] = self.w.processCompleteFont.get()
return True
except:
import traceback
print(traceback.format_exc())
return False
def LoadPreferences( self ):
try:
# register defaults:
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix", "# CPAL index, followed by ‘=’, followed by glyph name suffix\n# list them in chronological order, i.e., bottom-up\n# use hashtags for comments\n0=.shadow\n2=.body\n1=.front")
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs", 1)
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers", 1)
Glyphs.registerDefault("com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont", 1)
# load previously written prefs:
self.w.indexToSuffix.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix"] )
self.w.disableSuffixedGlyphs.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs"] )
self.w.deletePreexistingColorLayers.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers"] )
self.w.processCompleteFont.set( Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont"] )
return True
except:
import traceback
print(traceback.format_exc())
return False
def nameContainsAnyOfTheseSuffixes(self, glyphName, allSuffixes):
for suffix in allSuffixes:
if suffix in glyphName:
return True
return False
def allSuffixes(self, suffixMapping):
suffixes = []
for mapping in suffixMapping:
suffix = mapping[0]
suffixes.append(suffix)
return set(suffixes)
def parseIndexSuffixList(self, textEntry):
suffixMapping = []
for line in textEntry.splitlines():
if "#" in line:
hashtagOffset = line.find("#")
line = line[:hashtagOffset]
if "=" in line:
items = line.split("=")
colorIndex = int(items[0].strip())
suffix = items[1].strip().split()[0]
suffixMapping.append((suffix, colorIndex))
return suffixMapping
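	# e.g. parseIndexSuffixList("0=.shadow\n2=.body") -> [(".shadow", 0), (".body", 2)]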
def MergeSuffixedGlyphsIntoColorLayersMain( self, sender=None ):
try:
# clear macro window log:
Glyphs.clearLog()
# update settings to the latest user input:
if not self.SavePreferences():
print("Note: 'Merge Suffixed Glyphs into Color Layers' could not write preferences.")
thisFont = Glyphs.font # frontmost font
if thisFont is None:
Message(title="No Font Open", message="The script requires a font. Open a font and run the script again.", OKButton=None)
else:
print("Merge Suffixed Glyphs into Color Layers Report for %s" % thisFont.familyName)
if thisFont.filepath:
print(thisFont.filepath)
else:
print("⚠️ The font file has not been saved yet.")
print()
indexToSuffix = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.indexToSuffix"]
disableSuffixedGlyphs = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.disableSuffixedGlyphs"]
deletePreexistingColorLayers = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.deletePreexistingColorLayers"]
processCompleteFont = Glyphs.defaults["com.mekkablue.MergeSuffixedGlyphsIntoColorLayers.processCompleteFont"]
suffixMapping = self.parseIndexSuffixList(indexToSuffix)
if not suffixMapping:
Message(title="Merge Error", message="No mapping could be derived from your text entry. Stick to the colorindex=.suffix syntax.", OKButton=None)
else:
allSuffixes = self.allSuffixes(suffixMapping)
if processCompleteFont:
glyphsToProcess = [g for g in thisFont.glyphs if not self.nameContainsAnyOfTheseSuffixes(g.name, allSuffixes)]
else:
glyphsToProcess = [l.parent for l in thisFont.selectedLayers if not self.nameContainsAnyOfTheseSuffixes(l.parent.name, allSuffixes)]
for targetGlyph in glyphsToProcess:
glyphName = targetGlyph.name
print("🔠 %s" % glyphName)
if deletePreexistingColorLayers:
print("⚠️ Deleting preexisting Color layers...")
for i in reversed(range(len(targetGlyph.layers))):
potentialColorLayer = targetGlyph.layers[i]
if not potentialColorLayer.isMasterLayer:
deleteThisLayer = False
try:
# GLYPHS 3
if potentialColorLayer.isColorPaletteLayer():
deleteThisLayer = True
except:
# GLYPHS 2
if potentialColorLayer.name.startswith("Color "):
deleteThisLayer = True
if deleteThisLayer:
print(" 🚫 Removing Color layer ‘%s’" % potentialColorLayer.name)
currentLayerID = potentialColorLayer.layerId
try:
# GLYPHS 3
targetGlyph.removeLayerForId_(currentLayerID)
except:
# GLYPHS 2
targetGlyph.removeLayerForKey_(currentLayerID)
for mapping in suffixMapping:
suffix = mapping[0]
colorIndex = mapping[1]
suffixGlyphName = "%s%s"%(glyphName, suffix)
suffixGlyph = thisFont.glyphs[suffixGlyphName]
if not suffixGlyph:
print("⚠️ Not found: %s"%suffixGlyphName)
else:
print("✅ Merging %s into CPAL Color %i" % (suffixGlyphName, colorIndex))
if suffixGlyph.export and disableSuffixedGlyphs:
suffixGlyph.export = False
for master in thisFont.masters:
mID = master.id
colorLayer = copy(suffixGlyph.layers[mID])
colorLayer.associatedMasterId = mID
try:
# GLYPHS 3
colorLayer.setColorPaletteLayer_(1)
colorLayer.setAttribute_forKey_(colorIndex, "colorPalette")
except:
# GLYPHS 2
colorLayer.name = "Color %i" % colorIndex
targetGlyph.layers.append(colorLayer)
# self.w.close() # delete if you want window to stay open
# Final report:
Glyphs.showNotification(
"%s: Done" % (thisFont.familyName),
"Merge Suffixed Glyphs into Color Layers is finished. Details in Macro Window",
)
print("\nDone.")
except Exception as e:
# brings macro window to front and reports error:
Glyphs.showMacroWindow()
print("Merge Suffixed Glyphs into Color Layers Error: %s" % e)
import traceback
print(traceback.format_exc())
MergeSuffixedGlyphsIntoColorLayers() | apache-2.0 | -2,315,050,609,437,465,600 | 44.227848 | 273 | 0.727654 | false |
april1452/craigslove | generate_post.py | 1 | 3348 | import string
import sys
import argparse
import csv
import random
# import scipy
# import scipy.stats
# from scipy.stats import rv_discrete
from collections import defaultdict
# locations = ['chicago', 'dallas', 'denver', 'jacksonville', 'lasvegas', 'losangeles', 'miami', 'minneapolis', 'newyork', 'oklahomacity', 'providence', 'seattle', 'sfbay', 'washingtondc']
locations = ['providence']
posttype = ['m4m', 'm4w', 'msr', 'stp', 'w4m', 'w4w']
def main():
for location in locations:
with open('posts/' + location + '/w4w.csv') as f:
reader = csv.reader(f)
entries = []
for row in reader:
entry = row[8]
entries.append(entry)
transition_matrix = calculate_transition_matrix(entries)
# sampling_mechanism = defaultdict(lambda: None)
# for w in transition_matrix:
# xk = []
# pk = []
# for w_prime in transition_matrix:
# xk.append(w_prime)
# pk.append(transition_matrix[w][w_prime])
# sampling_mechanism[w] = rv_discrete(values=(xk, pk))
prev_word = None
next_word = '*START*'
generated_post = ''
while next_word != '*END*':
prev_word = next_word
random_probability = random.random() # between 0 and 1
cumulative_probability = 0.0
# next_word = sampling_mechanism[prev_word].rvs()
for w_prime in transition_matrix[prev_word]:
cumulative_probability += transition_matrix[prev_word][w_prime]
if cumulative_probability > random_probability:
next_word = w_prime
break
if len(next_word) > 1 or next_word in (string.punctuation + 'i' + 'a'):
generated_post += next_word + ' '
        print generated_post[:-7]  # strip the trailing ' *END* ' marker
def tokenize(words):
index = 0
while index < len(words):
if len(words[index]) > 1 and words[index][-1] in string.punctuation:
            punctuation_mark = words[index][-1]
            words[index] = words[index][:-1]
            words.insert(index + 1, punctuation_mark)
index += 1
return words
def calculate_transition_matrix(training_data): # training_data is a list of strings
transition_matrix = defaultdict(lambda: defaultdict(float))
for post in training_data:
words = tokenize(post.lower().split())
transition_matrix['*START*'][words[0]] += 1.0
for i in range(len(words) - 1):
transition_matrix[words[i]][words[i + 1]] += 1.0
transition_matrix[words[len(words) - 1]]['*END*'] += 1.0
for w in transition_matrix:
unigram_count = 0
for w_prime in transition_matrix[w]:
unigram_count += transition_matrix[w][w_prime]
for w_prime in transition_matrix[w]:
transition_matrix[w][w_prime] = transition_matrix[w][w_prime] / unigram_count
return transition_matrix
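# Illustrative example (hypothetical corpus): for training_data = ["hi there", "hi all"],
# calculate_transition_matrix gives transition_matrix['*START*']['hi'] == 1.0 and
# transition_matrix['hi']['there'] == transition_matrix['hi']['all'] == 0.5.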
if __name__ == "__main__":
main() | mit | -8,948,929,042,323,856,000 | 32.828283 | 188 | 0.512843 | false |
NoahPeeters/pymathexpressions | mathexpressions/lib.py | 1 | 2465 | __author__ = 'Noah Peeters'
import math
const = {
'pi': [math.pi, '\pi'],
'e': [math.e, 'e']
}
float_chars = [str(x) for x in range(10)]
float_chars.append('.')
operators = ['+', '-', '*', '/', '^', '%', '=']
operators_priorities = [0, 0, 1, 1, 2, 1, 0]
operators_latex = ['%s+%s', '%s-%s', '%s*%s', '\\frac{%s}{%s}', '%s^{%s}', '%s\\mod%s', '%s=%s']
max_priority = 2
def use_operator(o, para1, para2):
if o == '+':
return para1 + para2
elif o == '-':
return para1 - para2
elif o == '*':
return para1 * para2
elif o == '/':
return para1 / para2
elif o == '^':
return math.pow(para1, para2)
elif o == '%':
        return para1 % para2
elif o == '=':
return None
def latex_operator(o, para1, para2):
index = operators.index(o)
return operators_latex[index] % (para1, para2)
def get_priority(p):
return operators_priorities[operators.index(p.name)]
def is_number(name):
if len(name) == 0:
return False
for i in name:
if i not in float_chars:
return False
return True
functions = {
'acos': '\\arccos(%s)',
'acosh': None,
'asin': '\\arcsin(%s)',
'asinh': None,
'atan': '\\arctan(%s)',
'atan2': None,
'atanh': None,
'ceil': None,
'copysign': None,
'cos': '\\cos(%s)',
'cosh': '\\cosh(%s)',
'degrees': None,
'erf': None,
'erfc': None,
'exp': 'e^{%s}',
'expm1': 'e^{%s}-1',
'abs': '|%s|',
'factorial': '%s!',
'floor': None,
'fmod': '%s\\mod%s',
'gamma': None,
'hypot': '\\sqrt(%s^{2}+%s^{2})',
'ldexp': None,
'lgamma': None,
'log': '\\log(%s)',
'log10': '\\log_10(%s)',
'logn': None, # latex support
'pow': '%s^{%s}',
'radians': None,
'round': None,
'roundn': None,
'sin': '\\sin(%s)',
'sinh': '\\sinh(%s)',
'sqrt': '\\sqrt(%s)',
'tan': '\\tan(%s)',
'tanh': '\\tanh(%s)'
}
def use_function(name, para):
if name == 'logn':
return math.log(para[0], para[1])
elif name == 'round':
return round(para[0])
elif name == 'roundn':
return round(para[0], para[1])
elif name == 'abs':
return math.fabs(para[0])
else:
return getattr(math, name)(*para)
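# Illustrative examples: use_operator('^', 2, 10) returns 1024.0,
# use_operator('%', 7, 3) returns 1, and use_function('logn', [8, 2]) returns 3.0.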
def get_function_latex(name, para):
if name == 'logn':
return '\\log_%s(%s)' % (para[1], para[0])
else:
return functions[name] % tuple(para) | mit | 4,933,296,013,770,592,000 | 21.017857 | 96 | 0.481136 | false |
rafaelthca/OmniDB | OmniDB/OmniDB_app/urls.py | 1 | 24922 | from django.conf.urls import url
from django.urls import include, path
from . import views
from django.conf import settings
from django.conf.urls.static import static
base_urlpatterns = [
    url(r'^upload/$', views.plugins.upload_view, name='upload'),
#LOGIN
url(r'^$', views.login.index, name='login'),
url(r'^login/', views.login.index, name='login'),
url(r'^logout/', views.login.logout, name='logout'),
url(r'^check_session_message/$', views.login.check_session_message, name='check_session_message'),
url(r'^sign_in/$', views.login.sign_in, name='sign_in'),
#CONNECTIONS
url(r'^connections/', views.connections.index, name='connections'),
url(r'^get_connections/$', views.connections.get_connections, name='get_connections'),
url(r'^save_connections/$', views.connections.save_connections, name='save_connections'),
url(r'^test_connection/$', views.connections.test_connection, name='test_connection'),
url(r'^select_connection/$', views.connections.select_connection, name='select_connection'),
url(r'^get_groups/$', views.connections.get_groups, name='get_groups'),
url(r'^new_group/$', views.connections.new_group, name='new_group'),
url(r'^edit_group/$', views.connections.edit_group, name='edit_group'),
url(r'^delete_group/$', views.connections.delete_group, name='delete_group'),
#USERS
url(r'^get_users/$', views.users.get_users, name='get_users'),
url(r'^new_user/$', views.users.new_user, name='new_user'),
url(r'^remove_user/$', views.users.remove_user, name='remove_user'),
url(r'^save_users/$', views.users.save_users, name='save_users'),
#MONITORING
url(r'^monitoring/', views.monitoring.index, name='monitoring'),
url(r'^get_nodes/$', views.monitoring.get_nodes, name='get_nodes'),
url(r'^new_node/$', views.monitoring.new_node, name='new_node'),
url(r'^remove_node/$', views.monitoring.remove_node, name='remove_node'),
url(r'^refresh_node_key/$', views.monitoring.refresh_node_key, name='refresh_node_key'),
url(r'^save_nodes/$', views.monitoring.save_nodes, name='save_nodes'),
url(r'^get_alerts/$', views.monitoring.get_alerts, name='get_alerts'),
url(r'^new_alert/$', views.monitoring.new_alert, name='new_alert'),
url(r'^remove_alert/$', views.monitoring.remove_alert, name='remove_alert'),
url(r'^save_alerts/$', views.monitoring.save_alerts, name='save_alerts'),
url(r'^get_alert_data_list/$', views.monitoring.get_alert_data_list, name='get_alert_data_list'),
url(r'^view_alert_chart/$', views.monitoring.view_alert_chart, name='view_alert_chart'),
url(r'^receive_alert_data/', views.monitoring.receive_alert_data, name='receive_alert_data'),
#WORKSPACE
url(r'^workspace/', views.workspace.index, name='workspace'),
url(r'^shortcuts/', views.workspace.shortcuts, name='shortcuts'),
url(r'^close_welcome/', views.workspace.close_welcome, name='close_welcome'),
url(r'^save_config_user/', views.workspace.save_config_user, name='save_config_user'),
url(r'^save_shortcuts/', views.workspace.save_shortcuts, name='save_shortcuts'),
url(r'^get_database_list/', views.workspace.get_database_list, name='get_database_list'),
url(r'^renew_password/', views.workspace.renew_password, name='renew_password'),
url(r'^draw_graph/', views.workspace.draw_graph, name='draw_graph'),
url(r'^alter_table_data/', views.workspace.alter_table_data, name='alter_table_data'),
url(r'^save_alter_table/', views.workspace.save_alter_table, name='save_alter_table'),
url(r'^start_edit_data/', views.workspace.start_edit_data, name='start_edit_data'),
url(r'^get_completions/', views.workspace.get_completions, name='get_completions'),
url(r'^get_completions_table/', views.workspace.get_completions_table, name='get_completions_table'),
url(r'^get_command_list/', views.workspace.get_command_list, name='get_command_list'),
url(r'^clear_command_list/', views.workspace.clear_command_list, name='clear_command_list'),
url(r'^indent_sql/', views.workspace.indent_sql, name='indent_sql'),
url(r'^refresh_monitoring/', views.workspace.refresh_monitoring, name='refresh_monitoring'),
url(r'^get_console_history/', views.workspace.get_console_history, name='get_console_history'),
url(r'^get_console_history_clean/', views.workspace.get_console_history_clean, name='get_console_history_clean'),
url(r'^get_autocomplete_results/', views.workspace.get_autocomplete_results, name='get_autocomplete_results'),
url(r'^delete_plugin/', views.plugins.delete_plugin, name='delete_plugin'),
#HOOKS
url(r'^get_plugins/', views.plugins.get_plugins, name='get_plugins'),
url(r'^list_plugins/', views.plugins.list_plugins, name='list_plugins'),
url(r'^reload_plugins/', views.plugins.reload_plugins, name='reload_plugins'),
url(r'^exec_plugin_function/', views.plugins.exec_plugin_function, name='exec_plugin_function'),
#TREE_SNIPPETS
url(r'^get_node_children/', views.tree_snippets.get_node_children, name='get_node_children'),
url(r'^get_snippet_text/', views.tree_snippets.get_snippet_text, name='get_snippet_text'),
url(r'^new_node_snippet/', views.tree_snippets.new_node_snippet, name='new_node_snippet'),
url(r'^delete_node_snippet/', views.tree_snippets.delete_node_snippet, name='delete_node_snippet'),
url(r'^save_snippet_text/', views.tree_snippets.save_snippet_text, name='save_snippet_text'),
url(r'^rename_node_snippet/', views.tree_snippets.rename_node_snippet, name='rename_node_snippet'),
#TREE_POSTGRESQL
url(r'^get_tree_info_postgresql/', views.tree_postgresql.get_tree_info, name='get_tree_info'),
url(r'^get_tables_postgresql/', views.tree_postgresql.get_tables, name='get_tables'),
url(r'^get_schemas_postgresql/', views.tree_postgresql.get_schemas, name='get_schemas'),
url(r'^get_columns_postgresql/', views.tree_postgresql.get_columns, name='get_columns'),
url(r'^get_pk_postgresql/', views.tree_postgresql.get_pk, name='get_pk'),
url(r'^get_pk_columns_postgresql/', views.tree_postgresql.get_pk_columns, name='get_pk_columns'),
url(r'^get_fks_postgresql/', views.tree_postgresql.get_fks, name='get_fks'),
url(r'^get_fks_columns_postgresql/', views.tree_postgresql.get_fks_columns, name='get_fks_columns'),
url(r'^get_uniques_postgresql/', views.tree_postgresql.get_uniques, name='get_uniques'),
url(r'^get_uniques_columns_postgresql/', views.tree_postgresql.get_uniques_columns, name='get_uniques_columns'),
url(r'^get_indexes_postgresql/', views.tree_postgresql.get_indexes, name='get_indexes'),
url(r'^get_indexes_columns_postgresql/', views.tree_postgresql.get_indexes_columns, name='get_indexes_columns'),
url(r'^get_checks_postgresql/', views.tree_postgresql.get_checks, name='get_checks'),
url(r'^get_excludes_postgresql/', views.tree_postgresql.get_excludes, name='get_excludes'),
url(r'^get_rules_postgresql/', views.tree_postgresql.get_rules, name='get_rules'),
url(r'^get_rule_definition_postgresql/', views.tree_postgresql.get_rule_definition, name='get_rule_definition'),
url(r'^get_triggers_postgresql/', views.tree_postgresql.get_triggers, name='get_triggers'),
url(r'^get_eventtriggers_postgresql/', views.tree_postgresql.get_eventtriggers, name='get_eventtriggers'),
url(r'^get_inheriteds_postgresql/', views.tree_postgresql.get_inheriteds, name='get_inheriteds'),
url(r'^get_inheriteds_parents_postgresql/', views.tree_postgresql.get_inheriteds_parents, name='get_inheriteds_parents'),
url(r'^get_inheriteds_children_postgresql/', views.tree_postgresql.get_inheriteds_children, name='get_inheriteds_children'),
url(r'^get_partitions_postgresql/', views.tree_postgresql.get_partitions, name='get_partitions'),
url(r'^get_partitions_parents_postgresql/', views.tree_postgresql.get_partitions_parents, name='get_partitions_parents'),
url(r'^get_partitions_children_postgresql/', views.tree_postgresql.get_partitions_children, name='get_partitions_children'),
url(r'^get_functions_postgresql/', views.tree_postgresql.get_functions, name='get_functions'),
url(r'^get_function_fields_postgresql/', views.tree_postgresql.get_function_fields, name='get_function_fields'),
url(r'^get_function_definition_postgresql/', views.tree_postgresql.get_function_definition, name='get_function_definition'),
url(r'^get_function_debug_postgresql/', views.tree_postgresql.get_function_debug, name='get_function_debug'),
url(r'^get_procedures_postgresql/', views.tree_postgresql.get_procedures, name='get_procedures'),
url(r'^get_procedure_fields_postgresql/', views.tree_postgresql.get_procedure_fields, name='get_procedure_fields'),
url(r'^get_procedure_definition_postgresql/', views.tree_postgresql.get_procedure_definition, name='get_procedure_definition'),
url(r'^get_procedure_debug_postgresql/', views.tree_postgresql.get_procedure_debug, name='get_procedure_debug'),
url(r'^get_triggerfunctions_postgresql/', views.tree_postgresql.get_triggerfunctions, name='get_triggerfunctions'),
url(r'^get_triggerfunction_definition_postgresql/', views.tree_postgresql.get_triggerfunction_definition, name='get_triggerfunction_definition'),
url(r'^get_eventtriggerfunctions_postgresql/', views.tree_postgresql.get_eventtriggerfunctions, name='get_eventtriggerfunctions'),
url(r'^get_eventtriggerfunction_definition_postgresql/', views.tree_postgresql.get_eventtriggerfunction_definition, name='get_eventtriggerfunction_definition'),
url(r'^get_sequences_postgresql/', views.tree_postgresql.get_sequences, name='get_sequences'),
url(r'^get_views_postgresql/', views.tree_postgresql.get_views, name='get_views'),
url(r'^get_views_columns_postgresql/', views.tree_postgresql.get_views_columns, name='get_views_columns'),
url(r'^get_view_definition_postgresql/', views.tree_postgresql.get_view_definition, name='get_view_definition'),
url(r'^get_mviews_postgresql/', views.tree_postgresql.get_mviews, name='get_mviews'),
url(r'^get_mviews_columns_postgresql/', views.tree_postgresql.get_mviews_columns, name='get_mviews_columns'),
url(r'^get_mview_definition_postgresql/', views.tree_postgresql.get_mview_definition, name='get_mview_definition'),
url(r'^get_databases_postgresql/', views.tree_postgresql.get_databases, name='get_databases'),
url(r'^get_tablespaces_postgresql/', views.tree_postgresql.get_tablespaces, name='get_tablespaces'),
url(r'^get_roles_postgresql/', views.tree_postgresql.get_roles, name='get_roles'),
url(r'^get_extensions_postgresql/', views.tree_postgresql.get_extensions, name='get_extensions'),
url(r'^get_physicalreplicationslots_postgresql/', views.tree_postgresql.get_physicalreplicationslots, name='get_physicalreplicationslots'),
url(r'^get_logicalreplicationslots_postgresql/', views.tree_postgresql.get_logicalreplicationslots, name='get_logicalreplicationslots'),
url(r'^get_publications_postgresql/', views.tree_postgresql.get_publications, name='get_publications'),
url(r'^get_subscriptions_postgresql/', views.tree_postgresql.get_subscriptions, name='get_subscriptions'),
url(r'^get_publication_tables_postgresql/', views.tree_postgresql.get_publication_tables, name='get_publication_tables'),
url(r'^get_subscription_tables_postgresql/', views.tree_postgresql.get_subscription_tables, name='get_subscription_tables'),
url(r'^get_foreign_data_wrappers_postgresql/', views.tree_postgresql.get_foreign_data_wrappers, name='get_foreign_data_wrappers'),
url(r'^get_foreign_servers_postgresql/', views.tree_postgresql.get_foreign_servers, name='get_foreign_servers'),
url(r'^get_user_mappings_postgresql/', views.tree_postgresql.get_user_mappings, name='get_user_mappings'),
url(r'^get_foreign_tables_postgresql/', views.tree_postgresql.get_foreign_tables, name='get_foreign_tables'),
url(r'^get_foreign_columns_postgresql/', views.tree_postgresql.get_foreign_columns, name='get_foreign_columns'),
url(r'^get_types_postgresql/', views.tree_postgresql.get_types, name='get_types'),
url(r'^get_domains_postgresql/', views.tree_postgresql.get_domains, name='get_domains'),
url(r'^kill_backend_postgresql/', views.tree_postgresql.kill_backend, name='kill_backend'),
url(r'^get_properties_postgresql/', views.tree_postgresql.get_properties, name='get_properties'),
url(r'^get_database_objects_postgresql/', views.tree_postgresql.get_database_objects, name='get_database_objects'),
url(r'^template_select_postgresql/', views.tree_postgresql.template_select, name='template_select'),
url(r'^template_insert_postgresql/', views.tree_postgresql.template_insert, name='template_insert'),
url(r'^template_update_postgresql/', views.tree_postgresql.template_update, name='template_update'),
url(r'^template_select_function_postgresql/', views.tree_postgresql.template_select_function, name='template_select_function'),
url(r'^template_call_procedure_postgresql/', views.tree_postgresql.template_call_procedure, name='template_call_procedure'),
url(r'^change_active_database/', views.workspace.change_active_database, name='change_active_database'),
url(r'^get_postgresql_version/', views.tree_postgresql.get_version, name='get_version'),
#TREE_ORACLE
url(r'^get_tree_info_oracle/', views.tree_oracle.get_tree_info, name='get_tree_info'),
url(r'^get_tables_oracle/', views.tree_oracle.get_tables, name='get_tables'),
url(r'^get_columns_oracle/', views.tree_oracle.get_columns, name='get_columns'),
url(r'^get_pk_oracle/', views.tree_oracle.get_pk, name='get_pk'),
url(r'^get_pk_columns_oracle/', views.tree_oracle.get_pk_columns, name='get_pk_columns'),
url(r'^get_fks_oracle/', views.tree_oracle.get_fks, name='get_fks'),
url(r'^get_fks_columns_oracle/', views.tree_oracle.get_fks_columns, name='get_fks_columns'),
url(r'^get_uniques_oracle/', views.tree_oracle.get_uniques, name='get_uniques'),
url(r'^get_uniques_columns_oracle/', views.tree_oracle.get_uniques_columns, name='get_uniques_columns'),
url(r'^get_indexes_oracle/', views.tree_oracle.get_indexes, name='get_indexes'),
url(r'^get_indexes_columns_oracle/', views.tree_oracle.get_indexes_columns, name='get_indexes_columns'),
#url(r'^get_triggers_oracle/', views.tree_oracle.get_triggers, name='get_triggers'),
#url(r'^get_partitions_oracle/', views.tree_oracle.get_partitions, name='get_partitions'),
url(r'^get_functions_oracle/', views.tree_oracle.get_functions, name='get_functions'),
url(r'^get_function_fields_oracle/', views.tree_oracle.get_function_fields, name='get_function_fields'),
url(r'^get_function_definition_oracle/', views.tree_oracle.get_function_definition, name='get_function_definition'),
url(r'^get_procedures_oracle/', views.tree_oracle.get_procedures, name='get_procedures'),
url(r'^get_procedure_fields_oracle/', views.tree_oracle.get_procedure_fields, name='get_procedure_fields'),
url(r'^get_procedure_definition_oracle/', views.tree_oracle.get_procedure_definition, name='get_procedure_definition'),
#url(r'^get_function_debug_oracle/', views.tree_oracle.get_function_debug, name='get_function_debug'),
#url(r'^get_triggerfunctions_oracle/', views.tree_oracle.get_triggerfunctions, name='get_triggerfunctions'),
#url(r'^get_triggerfunction_definition_oracle/', views.tree_oracle.get_triggerfunction_definition, name='get_triggerfunction_definition'),
url(r'^get_sequences_oracle/', views.tree_oracle.get_sequences, name='get_sequences'),
url(r'^get_views_oracle/', views.tree_oracle.get_views, name='get_views'),
url(r'^get_views_columns_oracle/', views.tree_oracle.get_views_columns, name='get_views_columns'),
url(r'^get_view_definition_oracle/', views.tree_oracle.get_view_definition, name='get_view_definition'),
#url(r'^get_mviews_oracle/', views.tree_oracle.get_mviews, name='get_mviews'),
#url(r'^get_mviews_columns_oracle/', views.tree_oracle.get_mviews_columns, name='get_mviews_columns'),
#url(r'^get_mview_definition_oracle/', views.tree_oracle.get_mview_definition, name='get_mview_definition'),
url(r'^get_tablespaces_oracle/', views.tree_oracle.get_tablespaces, name='get_tablespaces'),
url(r'^get_roles_oracle/', views.tree_oracle.get_roles, name='get_roles'),
url(r'^kill_backend_oracle/', views.tree_oracle.kill_backend, name='kill_backend'),
url(r'^get_properties_oracle/', views.tree_oracle.get_properties, name='get_properties'),
url(r'^template_select_oracle/', views.tree_oracle.template_select, name='template_select'),
url(r'^template_insert_oracle/', views.tree_oracle.template_insert, name='template_insert'),
url(r'^template_update_oracle/', views.tree_oracle.template_update, name='template_update'),
#TREE_MYSQL
url(r'^get_tree_info_mysql/', views.tree_mysql.get_tree_info, name='get_tree_info'),
url(r'^get_tables_mysql/', views.tree_mysql.get_tables, name='get_tables'),
url(r'^get_columns_mysql/', views.tree_mysql.get_columns, name='get_columns'),
url(r'^get_pk_mysql/', views.tree_mysql.get_pk, name='get_pk'),
url(r'^get_pk_columns_mysql/', views.tree_mysql.get_pk_columns, name='get_pk_columns'),
url(r'^get_fks_mysql/', views.tree_mysql.get_fks, name='get_fks'),
url(r'^get_fks_columns_mysql/', views.tree_mysql.get_fks_columns, name='get_fks_columns'),
url(r'^get_uniques_mysql/', views.tree_mysql.get_uniques, name='get_uniques'),
url(r'^get_uniques_columns_mysql/', views.tree_mysql.get_uniques_columns, name='get_uniques_columns'),
url(r'^get_indexes_mysql/', views.tree_mysql.get_indexes, name='get_indexes'),
url(r'^get_indexes_columns_mysql/', views.tree_mysql.get_indexes_columns, name='get_indexes_columns'),
#url(r'^get_triggers_mysql/', views.tree_mysql.get_triggers, name='get_triggers'),
#url(r'^get_partitions_mysql/', views.tree_mysql.get_partitions, name='get_partitions'),
url(r'^get_functions_mysql/', views.tree_mysql.get_functions, name='get_functions'),
url(r'^get_function_fields_mysql/', views.tree_mysql.get_function_fields, name='get_function_fields'),
url(r'^get_function_definition_mysql/', views.tree_mysql.get_function_definition, name='get_function_definition'),
url(r'^get_procedures_mysql/', views.tree_mysql.get_procedures, name='get_procedures'),
url(r'^get_procedure_fields_mysql/', views.tree_mysql.get_procedure_fields, name='get_procedure_fields'),
url(r'^get_procedure_definition_mysql/', views.tree_mysql.get_procedure_definition, name='get_procedure_definition'),
#url(r'^get_function_debug_mysql/', views.tree_mysql.get_function_debug, name='get_function_debug'),
#url(r'^get_triggerfunctions_mysql/', views.tree_mysql.get_triggerfunctions, name='get_triggerfunctions'),
#url(r'^get_triggerfunction_definition_mysql/', views.tree_mysql.get_triggerfunction_definition, name='get_triggerfunction_definition'),
#url(r'^get_sequences_mysql/', views.tree_mysql.get_sequences, name='get_sequences'),
url(r'^get_views_mysql/', views.tree_mysql.get_views, name='get_views'),
url(r'^get_views_columns_mysql/', views.tree_mysql.get_views_columns, name='get_views_columns'),
url(r'^get_view_definition_mysql/', views.tree_mysql.get_view_definition, name='get_view_definition'),
url(r'^get_databases_mysql/', views.tree_mysql.get_databases, name='get_databases'),
url(r'^get_roles_mysql/', views.tree_mysql.get_roles, name='get_roles'),
url(r'^kill_backend_mysql/', views.tree_mysql.kill_backend, name='kill_backend'),
url(r'^get_properties_mysql/', views.tree_mysql.get_properties, name='get_properties'),
url(r'^template_select_mysql/', views.tree_mysql.template_select, name='template_select'),
url(r'^template_insert_mysql/', views.tree_mysql.template_insert, name='template_insert'),
url(r'^template_update_mysql/', views.tree_mysql.template_update, name='template_update'),
#TREE_MARIADB
url(r'^get_tree_info_mariadb/', views.tree_mariadb.get_tree_info, name='get_tree_info'),
url(r'^get_tables_mariadb/', views.tree_mariadb.get_tables, name='get_tables'),
url(r'^get_columns_mariadb/', views.tree_mariadb.get_columns, name='get_columns'),
url(r'^get_pk_mariadb/', views.tree_mariadb.get_pk, name='get_pk'),
url(r'^get_pk_columns_mariadb/', views.tree_mariadb.get_pk_columns, name='get_pk_columns'),
url(r'^get_fks_mariadb/', views.tree_mariadb.get_fks, name='get_fks'),
url(r'^get_fks_columns_mariadb/', views.tree_mariadb.get_fks_columns, name='get_fks_columns'),
url(r'^get_uniques_mariadb/', views.tree_mariadb.get_uniques, name='get_uniques'),
url(r'^get_uniques_columns_mariadb/', views.tree_mariadb.get_uniques_columns, name='get_uniques_columns'),
url(r'^get_indexes_mariadb/', views.tree_mariadb.get_indexes, name='get_indexes'),
url(r'^get_indexes_columns_mariadb/', views.tree_mariadb.get_indexes_columns, name='get_indexes_columns'),
#url(r'^get_triggers_mariadb/', views.tree_mariadb.get_triggers, name='get_triggers'),
#url(r'^get_partitions_mariadb/', views.tree_mariadb.get_partitions, name='get_partitions'),
url(r'^get_functions_mariadb/', views.tree_mariadb.get_functions, name='get_functions'),
url(r'^get_function_fields_mariadb/', views.tree_mariadb.get_function_fields, name='get_function_fields'),
url(r'^get_function_definition_mariadb/', views.tree_mariadb.get_function_definition, name='get_function_definition'),
url(r'^get_procedures_mariadb/', views.tree_mariadb.get_procedures, name='get_procedures'),
url(r'^get_procedure_fields_mariadb/', views.tree_mariadb.get_procedure_fields, name='get_procedure_fields'),
url(r'^get_procedure_definition_mariadb/', views.tree_mariadb.get_procedure_definition, name='get_procedure_definition'),
#url(r'^get_function_debug_mariadb/', views.tree_mariadb.get_function_debug, name='get_function_debug'),
#url(r'^get_triggerfunctions_mariadb/', views.tree_mariadb.get_triggerfunctions, name='get_triggerfunctions'),
#url(r'^get_triggerfunction_definition_mariadb/', views.tree_mariadb.get_triggerfunction_definition, name='get_triggerfunction_definition'),
url(r'^get_sequences_mariadb/', views.tree_mariadb.get_sequences, name='get_sequences'),
url(r'^get_views_mariadb/', views.tree_mariadb.get_views, name='get_views'),
url(r'^get_views_columns_mariadb/', views.tree_mariadb.get_views_columns, name='get_views_columns'),
url(r'^get_view_definition_mariadb/', views.tree_mariadb.get_view_definition, name='get_view_definition'),
url(r'^get_databases_mariadb/', views.tree_mariadb.get_databases, name='get_databases'),
url(r'^get_roles_mariadb/', views.tree_mariadb.get_roles, name='get_roles'),
url(r'^kill_backend_mariadb/', views.tree_mariadb.kill_backend, name='kill_backend'),
url(r'^get_properties_mariadb/', views.tree_mariadb.get_properties, name='get_properties'),
url(r'^template_select_mariadb/', views.tree_mariadb.template_select, name='template_select'),
url(r'^template_insert_mariadb/', views.tree_mariadb.template_insert, name='template_insert'),
url(r'^template_update_mariadb/', views.tree_mariadb.template_update, name='template_update'),
#MONITORING SYSTEM
url(r'^get_monitor_nodes/', views.monitor_dashboard.get_monitor_nodes, name='get_monitor_nodes'),
url(r'^test_monitor_script/', views.monitor_dashboard.test_monitor_script, name='test_monitor_script'),
url(r'^get_monitor_unit_list/', views.monitor_dashboard.get_monitor_unit_list, name='get_monitor_unit_list'),
url(r'^get_monitor_unit_details/', views.monitor_dashboard.get_monitor_unit_details, name='get_monitor_unit_details'),
url(r'^get_monitor_units/', views.monitor_dashboard.get_monitor_units, name='get_monitor_units'),
url(r'^refresh_monitor_units/', views.monitor_dashboard.refresh_monitor_units, name='refresh_monitor_units'),
url(r'^get_monitor_unit_template/', views.monitor_dashboard.get_monitor_unit_template, name='get_monitor_unit_template'),
url(r'^save_monitor_unit/', views.monitor_dashboard.save_monitor_unit, name='save_monitor_unit'),
url(r'^delete_monitor_unit/', views.monitor_dashboard.delete_monitor_unit, name='delete_monitor_unit'),
url(r'^remove_saved_monitor_unit/', views.monitor_dashboard.remove_saved_monitor_unit, name='remove_saved_monitor_unit'),
url(r'^update_saved_monitor_unit_interval/', views.monitor_dashboard.update_saved_monitor_unit_interval, name='update_saved_monitor_unit_interval'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.PATH == '':
v_url = ''
else:
v_url = settings.PATH[1:] + '/'
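# e.g. with settings.PATH == '/omnidb', v_url becomes 'omnidb/' and every route in
# base_urlpatterns is served under that prefix.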
urlpatterns = [
    url(v_url, include(base_urlpatterns)),
    # if you wish to maintain the un-prefixed URLs too:
    # url(r'^subfolder/', include(base_urlpatterns))
]
| mit | -792,930,021,254,479,400 | 85.235294 | 164 | 0.724179 | false |
boreq/botnet | botnet/modules/builtin/meta.py | 1 | 2191 | from ...signals import _request_list_commands, _list_commands
from .. import BaseResponder
from ..lib import parse_command
class Meta(BaseResponder):
"""Displays basic info about this bot."""
ignore_help = False
ibip_repo = 'https://github.com/boreq/botnet'
def __init__(self, config):
super().__init__(config)
_list_commands.connect(self.on_list_commands)
def command_git(self, msg):
"""Alias for the IBIP identification.
Syntax: git
"""
self.ibip(msg)
@parse_command([('command_names', '*')])
def command_help(self, msg, args):
"""Sends a list of commands. If COMMAND is specified sends detailed help
in a private message.
Syntax: help [COMMAND ...]
"""
if len(args.command_names) == 0:
_request_list_commands.send(self, msg=msg, admin=False)
else:
super().command_help(msg)
@parse_command([('command_names', '*')])
def admin_command_help(self, msg, args):
"""Sends a list of commands. If COMMAND is specified sends detailed help
in a private message.
Syntax: help [COMMAND ...]
"""
if len(args.command_names) == 0:
_request_list_commands.send(self, msg=msg, admin=True)
else:
super().command_help(msg)
def ibip(self, msg):
"""Makes the bot identify itself as defined by The IRC Bot
Identification Protocol Standard.
"""
text = 'Reporting in! [Python] {ibip_repo} try {prefix}help'.format(
ibip_repo=self.ibip_repo,
prefix=self.config_get('command_prefix')
)
self.respond(msg, text)
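        # Example output (assuming the configured command_prefix is '.'):
        #   "Reporting in! [Python] https://github.com/boreq/botnet try .help"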
def on_list_commands(self, sender, msg, admin, commands):
"""Sends a list of commands received from the Manager."""
if admin:
text = 'Supported admin commands: %s' % ', '.join(commands)
else:
text = 'Supported commands: %s' % ', '.join(commands)
self.respond(msg, text)
def handle_privmsg(self, msg):
# Handle IBIP:
if self.is_command(msg, 'bots', command_prefix='.'):
self.ibip(msg)
mod = Meta
| mit | -4,800,175,909,826,175,000 | 29.859155 | 80 | 0.581013 | false |
harikishen/addons-server | src/olympia/addons/tasks.py | 1 | 14975 | import hashlib
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from elasticsearch_dsl import Search
from PIL import Image
import olympia.core.logger
from olympia import amo
from olympia.addons.models import (
Addon, attach_tags, attach_translations, AppSupport, CompatOverride,
IncompatibleVersions, Persona, Preview)
from olympia.addons.indexers import AddonIndexer
from olympia.amo.celery import task
from olympia.amo.decorators import set_modified_on, write
from olympia.amo.helpers import user_media_path
from olympia.amo.storage_utils import rm_stored_dir
from olympia.amo.utils import cache_ns_key, ImageCheck, LocalFileStorage
from olympia.editors.models import RereviewQueueTheme
from olympia.lib.es.utils import index_objects
from olympia.tags.models import Tag
from olympia.versions.models import Version
log = olympia.core.logger.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
update_last_updated(addon_id)
update_appsupport([addon_id])
def update_last_updated(addon_id):
queries = Addon._last_updated_queries()
try:
addon = Addon.objects.get(pk=addon_id)
except Addon.DoesNotExist:
log.info('[1@None] Updating last updated for %s failed, no addon found'
% addon_id)
return
log.info('[1@None] Updating last updated for %s.' % addon_id)
if addon.is_persona():
q = 'personas'
elif addon.status == amo.STATUS_PUBLIC:
q = 'public'
else:
q = 'exp'
qs = queries[q].filter(pk=addon_id).using('default')
res = qs.values_list('id', 'last_updated')
if res:
pk, t = res[0]
Addon.objects.filter(pk=pk).update(last_updated=t)
@write
def update_appsupport(ids):
log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
support = []
for addon in addons:
for app, appver in addon.compatible_apps.items():
if appver is None:
# Fake support for all version ranges.
min_, max_ = 0, 999999999999999999
else:
min_, max_ = appver.min.version_int, appver.max.version_int
support.append(AppSupport(addon=addon, app=app.id,
min=min_, max=max_))
if not support:
return
with transaction.atomic():
AppSupport.objects.filter(addon__id__in=ids).delete()
AppSupport.objects.bulk_create(support)
# All our updates were sql, so invalidate manually.
Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
log.info('[1@None] Removing preview with id of %s.' % id)
p = Preview(id=id)
for f in (p.thumbnail_path, p.image_path):
try:
storage.delete(f)
        except Exception as e:
log.error('Error deleting preview file (%s): %s' % (f, e))
@task(acks_late=True)
def index_addons(ids, **kw):
log.info('Indexing addons %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))
transforms = (attach_tags, attach_translations)
index_objects(ids, Addon, AddonIndexer.extract_document,
kw.pop('index', None), transforms, Addon.unfiltered)
@task
def unindex_addons(ids, **kw):
for addon in ids:
log.info('Removing addon [%s] from search index.' % addon)
Addon.unindex(addon)
@task
def delete_persona_image(dst, **kw):
log.info('[1@None] Deleting persona image: %s.' % dst)
if not dst.startswith(user_media_path('addons')):
log.error("Someone tried deleting something they shouldn't: %s" % dst)
return
try:
storage.delete(dst)
    except Exception as e:
log.error('Error deleting persona image: %s' % e)
@set_modified_on
def create_persona_preview_images(src, full_dst, **kw):
"""
Creates a 680x100 thumbnail used for the Persona preview and
a 32x32 thumbnail used for search suggestions/detail pages.
"""
log.info('[1@None] Resizing persona images: %s' % full_dst)
preview, full = amo.PERSONA_IMAGE_SIZES['header']
preview_w, preview_h = preview
orig_w, orig_h = full
with storage.open(src) as fp:
i_orig = i = Image.open(fp)
# Crop image from the right.
i = i.crop((orig_w - (preview_w * 2), 0, orig_w, orig_h))
# Resize preview.
i = i.resize(preview, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[0], 'wb') as fp:
i.save(fp, 'png')
_, icon_size = amo.PERSONA_IMAGE_SIZES['icon']
icon_w, icon_h = icon_size
# Resize icon.
i = i_orig
i.load()
i = i.crop((orig_w - (preview_h * 2), 0, orig_w, orig_h))
i = i.resize(icon_size, Image.ANTIALIAS)
i.load()
with storage.open(full_dst[1], 'wb') as fp:
i.save(fp, 'png')
return True
@set_modified_on
def save_persona_image(src, full_dst, **kw):
"""Creates a PNG of a Persona header/footer image."""
log.info('[1@None] Saving persona image: %s' % full_dst)
img = ImageCheck(storage.open(src))
if not img.is_image():
log.error('Not an image: %s' % src, exc_info=True)
return
with storage.open(src, 'rb') as fp:
i = Image.open(fp)
with storage.open(full_dst, 'wb') as fp:
i.save(fp, 'png')
return True
@task
def update_incompatible_appversions(data, **kw):
"""Updates the incompatible_versions table for this version."""
log.info('Updating incompatible_versions for %s versions.' % len(data))
addon_ids = set()
for version_id in data:
# This is here to handle both post_save and post_delete hooks.
IncompatibleVersions.objects.filter(version=version_id).delete()
try:
version = Version.objects.get(pk=version_id)
except Version.DoesNotExist:
log.info('Version ID [%d] not found. Incompatible versions were '
'cleared.' % version_id)
return
addon_ids.add(version.addon_id)
try:
compat = CompatOverride.objects.get(addon=version.addon)
except CompatOverride.DoesNotExist:
log.info('Compat override for addon with version ID [%d] not '
'found. Incompatible versions were cleared.' % version_id)
return
app_ranges = []
ranges = compat.collapsed_ranges()
for range in ranges:
if range.min == '0' and range.max == '*':
# Wildcard range, add all app ranges
app_ranges.extend(range.apps)
else:
# Since we can't rely on add-on version numbers, get the min
# and max ID values and find versions whose ID is within those
# ranges, being careful with wildcards.
min_id = max_id = None
if range.min == '0':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('id')
.values_list('id', flat=True)[:1])
if versions:
min_id = versions[0]
else:
try:
min_id = Version.objects.get(addon=version.addon_id,
version=range.min).id
except Version.DoesNotExist:
pass
if range.max == '*':
versions = (Version.objects.filter(addon=version.addon_id)
.order_by('-id')
.values_list('id', flat=True)[:1])
if versions:
max_id = versions[0]
else:
try:
max_id = Version.objects.get(addon=version.addon_id,
version=range.max).id
except Version.DoesNotExist:
pass
if min_id and max_id:
if min_id <= version.id <= max_id:
app_ranges.extend(range.apps)
for app_range in app_ranges:
IncompatibleVersions.objects.create(version=version,
app=app_range.app.id,
min_app_version=app_range.min,
max_app_version=app_range.max)
log.info('Added incompatible version for version ID [%d]: '
'app:%d, %s -> %s' % (version_id, app_range.app.id,
app_range.min, app_range.max))
# Increment namespace cache of compat versions.
for addon_id in addon_ids:
cache_ns_key('d2c-versions:%s' % addon_id, increment=True)
def make_checksum(header_path, footer_path):
ls = LocalFileStorage()
footer = footer_path and ls._open(footer_path).read() or ''
raw_checksum = ls._open(header_path).read() + footer
return hashlib.sha224(raw_checksum).hexdigest()
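# Note: make_checksum returns the SHA-224 hex digest of the raw header image
# bytes concatenated with the raw footer bytes (empty string if no footer);
# matching checksums are used below to flag duplicate personas.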
def theme_checksum(theme, **kw):
theme.checksum = make_checksum(theme.header_path, theme.footer_path)
dupe_personas = Persona.objects.filter(checksum=theme.checksum)
if dupe_personas.exists():
theme.dupe_persona = dupe_personas[0]
theme.save()
def rereviewqueuetheme_checksum(rqt, **kw):
"""Check for possible duplicate theme images."""
dupe_personas = Persona.objects.filter(
checksum=make_checksum(rqt.header_path or rqt.theme.header_path,
rqt.footer_path or rqt.theme.footer_path))
if dupe_personas.exists():
rqt.dupe_persona = dupe_personas[0]
rqt.save()
@task
@write
def save_theme(header, footer, addon, **kw):
"""Save theme image and calculates checksum after theme save."""
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'header.png')
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'footer.png')
try:
save_persona_image(src=header, full_dst=header_dst)
if footer:
save_persona_image(src=footer, full_dst=footer_dst)
create_persona_preview_images(
src=header, full_dst=[os.path.join(dst_root, 'preview.png'),
os.path.join(dst_root, 'icon.png')],
set_modified_on=[addon])
theme_checksum(addon.persona)
except IOError:
addon.delete()
raise
@task
@write
def save_theme_reupload(header, footer, addon, **kw):
header_dst = None
footer_dst = None
dst_root = os.path.join(user_media_path('addons'), str(addon.id))
try:
if header:
header = os.path.join(settings.TMP_PATH, 'persona_header', header)
header_dst = os.path.join(dst_root, 'pending_header.png')
save_persona_image(src=header, full_dst=header_dst)
if footer:
footer = os.path.join(settings.TMP_PATH, 'persona_footer', footer)
footer_dst = os.path.join(dst_root, 'pending_footer.png')
save_persona_image(src=footer, full_dst=footer_dst)
except IOError as e:
log.error(str(e))
raise
if header_dst or footer_dst:
theme = addon.persona
header = 'pending_header.png' if header_dst else theme.header
# Theme footer is optional, but can't be None.
footer = theme.footer or ''
if footer_dst:
footer = 'pending_footer.png'
# Store pending header and/or footer file paths for review.
RereviewQueueTheme.objects.filter(theme=theme).delete()
rqt = RereviewQueueTheme(theme=theme, header=header, footer=footer)
rereviewqueuetheme_checksum(rqt=rqt)
rqt.save()
@task
@write
def calc_checksum(theme_id, **kw):
"""For migration 596."""
lfs = LocalFileStorage()
theme = Persona.objects.get(id=theme_id)
header = theme.header_path
footer = theme.footer_path
# Delete invalid themes that are not images (e.g. PDF, EXE).
try:
Image.open(header)
Image.open(footer)
except IOError:
log.info('Deleting invalid theme [%s] (header: %s) (footer: %s)' %
(theme.addon.id, header, footer))
theme.addon.delete()
theme.delete()
rm_stored_dir(header.replace('header.png', ''), storage=lfs)
return
# Calculate checksum and save.
try:
theme.checksum = make_checksum(header, footer)
theme.save()
except IOError as e:
log.error(str(e))
@task
@write # To bypass cache and use the primary replica.
def find_inconsistencies_between_es_and_db(ids, **kw):
length = len(ids)
log.info(
'Searching for inconsistencies between db and es %d-%d [%d].',
ids[0], ids[-1], length)
db_addons = Addon.unfiltered.in_bulk(ids)
es_addons = Search(
doc_type=AddonIndexer.get_doctype_name(),
index=AddonIndexer.get_index_alias(),
using=amo.search.get_es()).filter('ids', values=ids)[:length].execute()
db_len = len(db_addons)
es_len = len(es_addons)
if db_len != es_len:
log.info('Inconsistency found: %d in db vs %d in es.',
db_len, es_len)
for result in es_addons.hits.hits:
pk = result['_source']['id']
db_modified = db_addons[pk].modified.isoformat()
es_modified = result['_source']['modified']
if db_modified != es_modified:
log.info('Inconsistency found for addon %d: '
'modified is %s in db vs %s in es.',
pk, db_modified, es_modified)
db_status = db_addons[pk].status
es_status = result['_source']['status']
if db_status != es_status:
log.info('Inconsistency found for addon %d: '
'status is %s in db vs %s in es.',
pk, db_status, es_status)
@task
@write
def add_firefox57_tag(ids, **kw):
"""Add firefox57 tag to addons with the specified ids."""
log.info(
'Adding firefox57 tag to addons %d-%d [%d].',
ids[0], ids[-1], len(ids))
addons = Addon.objects.filter(id__in=ids)
for addon in addons:
# This will create a couple extra queries to check for tag/addontag
# existence, and then trigger update_tag_stat tasks. But the
# alternative is adding activity log manually, making sure we don't
# add duplicate tags, manually updating the tag stats, so it's ok for
# a one-off task.
Tag(tag_text='firefox57').save_tag(addon)
| bsd-3-clause | 1,826,447,814,899,760,000 | 34.070258 | 79 | 0.585576 | false |
kevinrigney/PlaylistDatabase | youtube_search.py | 1 | 4208 | #!/usr/bin/env python3
import httplib2
import os
import sys
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow
class YoutubeSearcher():
# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data API for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
# https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "client_secrets.json"
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
# This OAuth 2.0 access scope allows for full read/write access to the
# authenticated user's account.
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def __init__(self):
flow = flow_from_clientsecrets(self.CLIENT_SECRETS_FILE,
message=self.MISSING_CLIENT_SECRETS_MESSAGE,
scope=self.YOUTUBE_READ_WRITE_SCOPE)
storage = Storage("ytpl-oauth2.json")
credentials = storage.get()
if credentials is None or credentials.invalid:
flags = argparser.parse_args()
credentials = run_flow(flow, storage, flags)
self.youtube = build(self.YOUTUBE_API_SERVICE_NAME, self.YOUTUBE_API_VERSION,
http=credentials.authorize(httplib2.Http()))
def get_most_viewed_link(self,query,max_results=5):
videos = self.youtube_search(query,max_results)
try:
video = videos[0]
            return ('https://www.youtube.com/watch?v=' + video, video)
        except IndexError:
            return ('', '')
def youtube_search(self,query,max_results=5):
# Call the search.list method to retrieve results matching the specified
# query term.
search_response = self.youtube.search().list(
q=query,
part="id",
maxResults=max_results,
order="relevance"
).execute()
videos = []
# Add each result to the appropriate list, and then display the lists of
# matching videos, channels, and playlists.
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
#print(search_result)
videos.append(search_result["id"]["videoId"])
#print("Videos:\n", "\n".join(videos), "\n")
return videos
def is_video_valid(self,video_id):
# Check if a video is still valid.
# (make sure it hasn't been deleted)
# The part is "id" because it has a quota cost of 0
search_response = self.youtube.videos().list(
id=video_id,
part="id"
).execute()
return search_response['pageInfo']['totalResults'] > 0
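# Example (hypothetical query): YoutubeSearcher().youtube_search("lofi beats", 3)
# returns up to three video IDs ordered by relevance.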
if __name__ == "__main__":
argparser.add_argument("--q", help="Search term", default="Google")
argparser.add_argument("--max-results", help="Max results", default=25)
args = argparser.parse_args()
searcher = YoutubeSearcher()
try:
        url, video_id = searcher.get_most_viewed_link(args.q, args.max_results)
        print("Video: " + url)
except HttpError as e:
print("An HTTP error %d occurred:\n%s" % (e.resp.status, e.content))
| mit | 6,983,723,532,908,328,000 | 34.361345 | 86 | 0.694867 | false |
south-coast-science/scs_dev | src/scs_dev/control_receiver.py | 1 | 7257 | #!/usr/bin/env python3
"""
Created on 17 Apr 2017
@author: Bruno Beloff ([email protected])
DESCRIPTION
The function of the control_receiver utility is to execute commands received over a messaging topic. In addition to
enabling secure remote management, the utility provides a secure challenge-response facility.
A typical South Coast Science device is provided with a messaging topic whose purpose is to enable bidirectional
command-and-response communications between the device and one or more remote management systems. Commands
follow a specific JSON format, which is validated as follows:
* the message must identify the device as the recipient
* the digest in the incoming message matches the digest computed by the device
* the command must be listed in the device's ~/SCS/cmd/ directory, or be "?"
The digest is computed using a shared secret generated by the scs_mfr/shared_secret utility.
If validated, the control_receiver utility executes the command, then publishes a receipt message which includes:
* the command stdout
* the command stderr
* the command return code
* the original message digest
* a new message digest
Entries in ~/SCS/cmd/ are typically symbolic links to commands that are implemented elsewhere, either by the operating
system, or by South Coast Science packages.
It is the responsibility of the device administrator to manage the ~/SCS/cmd/ directory. Care should be taken to exclude
commands that:
* could cause harm to the system
* require an interactive mode
* require root privileges
* can change the contents of the ~/SCS/cmd/ directory
SYNOPSIS
control_receiver.py [-r] [-e] [-v]
EXAMPLES
/home/pi/SCS/scs_dev/src/scs_dev/aws_topic_subscriber.py -cX -s /home/pi/SCS/pipes/mqtt_control_subscription.uds | \
/home/pi/SCS/scs_dev/src/scs_dev/control_receiver.py -r -v | \
/home/pi/SCS/scs_dev/src/scs_dev/aws_topic_publisher.py -v -cX -p /home/pi/SCS/pipes/mqtt_publication.uds
FILES
~/SCS/cmd/*
~/SCS/conf/system_id.json
~/SCS/conf/shared_secret.json
DOCUMENT EXAMPLE - REQUEST
{"/orgs/south-coast-science-dev/development/device/alpha-pi-eng-000006/control":
{"tag": "bruno", "attn": "scs-ap1-6", "rec": "2018-04-04T14:41:11.872+01:00", "cmd_tokens": ["?"],
"digest": "bf682976cb45d889207bf3e3b4a6e12336859a93d7023b8454514"}}
DOCUMENT EXAMPLE - RESPONSE
{"/orgs/south-coast-science-dev/development/device/alpha-pi-eng-000006/control":
{"tag": "scs-ap1-6", "rec": "2018-04-04T13:41:59.521+00:00",
"cmd": {"cmd": "?", "params": [],
"stdout": ["[\"afe_baseline\", \"afe_calib\", \"opc_power\", \"ps\", \"schedule\", \"shared_secret\"]"],
"stderr": [], "ret": 0},
"omd": "40ef7a9c0f70033bbe21827ed25286b448a5ad3ace9b16f44f3d94da6a89ab25",
"digest": "597f8de3852f1067f52f126398777204c378e8f5d30bad6d8d99ee536"}}
SEE ALSO
scs_analysis/aws_mqtt_control
scs_analysis/osio_mqtt_control
scs_mfr/shared_secret
"""
import json
import sys
import time
from scs_core.control.command import Command
from scs_core.control.control_datum import ControlDatum
from scs_core.control.control_receipt import ControlReceipt
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.json import JSONify
from scs_core.sys.shared_secret import SharedSecret
from scs_core.sys.signalled_exit import SignalledExit
from scs_core.sys.system_id import SystemID
from scs_dev.cmd.cmd_control_receiver import CmdControlReceiver
from scs_host.sys.host import Host
# --------------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':
# ----------------------------------------------------------------------------------------------------------------
# config...
deferred_commands = ('shutdown', 'reboot', 'restart')
# ----------------------------------------------------------------------------------------------------------------
# cmd...
cmd = CmdControlReceiver()
if cmd.verbose:
print("control_receiver: %s" % cmd, file=sys.stderr)
# ------------------------------------------------------------------------------------------------------------
# resources...
# SystemID...
system_id = SystemID.load(Host)
if system_id is None:
print("control_receiver: SystemID not available.", file=sys.stderr)
exit(1)
if cmd.verbose:
print("control_receiver: %s" % system_id, file=sys.stderr)
# SharedSecret...
secret = SharedSecret.load(Host)
if secret is None:
print("control_receiver: SharedSecret not available.", file=sys.stderr)
exit(1)
if cmd.verbose:
print("control_receiver: %s" % secret, file=sys.stderr)
sys.stderr.flush()
system_tag = system_id.message_tag()
key = secret.key
try:
# ------------------------------------------------------------------------------------------------------------
# run...
# signal handler...
SignalledExit.construct("control_receiver", cmd.verbose)
for line in sys.stdin:
# control...
try:
jdict = json.loads(line)
except ValueError:
continue
try:
datum = ControlDatum.construct_from_jdict(jdict)
except TypeError:
continue
if datum.attn != system_tag:
continue
if cmd.verbose:
print("control_receiver: %s" % datum, file=sys.stderr)
sys.stderr.flush()
if not datum.is_valid(key):
print("control_receiver: invalid digest: %s" % datum, file=sys.stderr)
sys.stderr.flush()
continue
if cmd.echo:
print(JSONify.dumps(datum))
sys.stdout.flush()
# command...
command = Command.construct_from_tokens(datum.cmd_tokens)
if command.cmd is not None and not command.is_valid(Host):
command.error("invalid command")
# execute immediate commands...
elif command.cmd not in deferred_commands:
command.execute(Host, datum.timeout)
# receipt...
if cmd.receipt:
now = LocalizedDatetime.now().utc()
receipt = ControlReceipt.construct_from_datum(datum, now, command, key)
print(JSONify.dumps(receipt))
sys.stdout.flush()
if cmd.verbose:
print("control_receiver: %s" % receipt, file=sys.stderr)
sys.stderr.flush()
# execute deferred commands...
if command.cmd in deferred_commands:
time.sleep(10.0) # wait, hoping that the receipt is sent
command.execute(Host, datum.timeout)
# ----------------------------------------------------------------------------------------------------------------
# end...
except ConnectionError as ex:
print("control_receiver: %s" % ex, file=sys.stderr)
except (KeyboardInterrupt, SystemExit):
pass
finally:
if cmd and cmd.verbose:
print("control_receiver: finishing", file=sys.stderr)
| mit | 8,452,370,803,216,645,000 | 32.911215 | 119 | 0.592945 | false |
afraser/CellProfiler-Analyst | cpa/dirichletintegrate.py | 1 | 6480 | from numpy import *
from scipy.integrate import quadrature, romberg, fixed_quad
from scipy.special import gammaln, betaln, digamma, polygamma, betainc, gamma
import pdb
from hypergeom import hyper3F2regularizedZ1, hyper3F2Z1, hyper3F2aZ1
def dirichlet_integrate(alpha):
normalizer = exp(sum(gammaln(alpha)) - gammaln(sum(alpha)))
def f_recur(x, idx, upper, vals):
if idx == 1:
# base case.
# set values for last two components
vals[1] = x
vals[0] = 1.0 - sum(vals[1:])
# compute Dirichlet value
            print vals.T, prod(vals ** (alpha - 1)), normalizer, alpha
return prod(vals.T ** (alpha - 1)) / normalizer
else:
vals[idx] = x
split = alpha[idx-1] / sum(alpha)
if (split < upper - x):
return romberg(f_recur, 0, split, args=(idx - 1, upper - x, vals), vec_func=False) + \
romberg(f_recur, split, upper - x, args=(idx - 1, upper - x, vals), vec_func=False)
else:
return romberg(f_recur, 0, upper - x, args=(idx - 1, upper - x, vals), vec_func=False)
split = alpha[-1] / sum(alpha)
print alpha / sum(alpha)
return romberg(f_recur, 0, split, args=(len(alpha) - 1, 1.0, zeros((len(alpha), 1), float64)), vec_func=False) + \
romberg(f_recur, split, 1, args=(len(alpha) - 1, 1.0, zeros((len(alpha), 1), float64)), vec_func=False)
def dirichlet_integrate_near0(alpha):
normalizer = exp(sum(gammaln(alpha)) - gammaln(sum(alpha)))
K = len(alpha)
def f_recur(x, idx, vals):
if idx == K - 2:
# base case.
# set values for last two components
vals[K - 2] = x
vals[K - 1] = 1.0 - sum(vals[0:K-1])
# print vals, prod(vals ** (alpha - 1)) / normalizer, normalizer
for v in vals[1:]:
assert v <= vals[0]+0.001
# compute Dirichlet value
return prod(vals.T ** (alpha - 1)) / normalizer
else:
vals[idx] = x
# we have to fulfill three requirements:
# vals[i] > 0 for all i
# vals[0] >= vals[i] for all i
# vals[i] sum to 1
# how much weight is left to assign?
remaining = 1.0 - sum(vals[:(idx+1)])
# require vals[i] > 0, and vals[0] >= vals[i]
lower_bound = max(0.0, remaining - vals[0] * (K - idx - 2))
upper_bound = min(remaining, vals[0])
assert lower_bound <= upper_bound+0.001
v = romberg(f_recur, lower_bound, upper_bound, args=(idx + 1, vals), vec_func=False)
return v
return romberg(f_recur, 1.0 / len(alpha), 1, args=(0, zeros((len(alpha), 1), float64)), vec_func=False)
def dirichlet_integrate_zero_enriched(alpha, base_level):
normalizer = exp(sum(gammaln(alpha)) - gammaln(sum(alpha)))
K = len(alpha)
def f_recur(x, idx, vals, remaining):
if idx == K - 2:
# base case.
# set values for last two components
vals[K - 2] = x
vals[K - 1] = remaining - x
# compute Dirichlet value
return prod(vals.T ** (alpha - 1)) / normalizer
else:
vals[idx] = x
remaining = remaining - x
v = romberg(f_recur, 0, remaining, args=(idx + 1, vals, remaining), vec_func=False)
return v
return romberg(f_recur, base_level, 1, args=(0, zeros((len(alpha), 1), float64), 1.0), vec_func=False)
def integrate_splits(prior, posterior):
splits = [finfo(float64).eps, 1.0 - finfo(float64).eps, prior[0] / sum(prior),
prior[1] / sum(prior), posterior[0] / sum(posterior),
              posterior[1] / sum(posterior)]
splits.sort()
return splits
def integrate(f, splits):
return sum([romberg(f, lo, hi, vec_func=True, tol=1e-4, divmax=10) for lo, hi in zip(splits[:-1], splits[1:])])
def integrateold(f, splits):
return sum([fixed_quad(f, lo, hi, n=100)[0] for lo, hi in zip(splits[:-1], splits[1:])])
def pdf_cdf_prod(x, prior, posterior):
lnCDF = log(betainc(prior[0], prior[1], x))
lnPDF = (posterior[0] - 1) * log(x) + (posterior[1] - 1) * log(1 - x) - betaln(posterior[0], posterior[1])
return exp(lnCDF + lnPDF)
def beta_enriched(prior, posterior):
# def f(x):
# return beta.cdf(x, prior[0], prior[1]) * beta.pdf(x, posterior[0], posterior[1])
# def g(x):
# return beta.pdf(x, posterior[0], posterior[1])
# def h(x):
# return pdf_cdf_prod(x, prior, posterior)
# # compute by integration
# splits = integrate_splits(prior, posterior)
# v = integrate(f, splits) / integrate(g, splits)
# use closed form
a = prior[0]
b = prior[1]
c = posterior[0]
d = posterior[1]
# See Integration.mathetmatica
# This would be better if we computed the log of the
# hypergeometric function, but I don't think that's generally
# possible.
hyper = hyper3F2aZ1(a, 1-b, a+c, a+c+d)
scale = exp(gammaln(a) + gammaln(a+c) + gammaln(d) - gammaln(1+a) - gammaln(a+c+d) - betaln(a,b) - betaln(c,d))
if isnan(hyper * scale):
# This can happen if hyper and scale are 0 and inf (or vice versa).
if prior[0] / sum(prior) > posterior[0] / sum(posterior):
return 0.0
return 1.0
return clip(hyper * scale, 0, 1)
def score(prior, counts):
''' score a well based on the prior fit to the data and the observed counts '''
assert prior.shape==counts.shape, "dirichletintegrate.score: array shapes do not match: "+str(prior.shape)+' and '+str(counts.shape)
K = len(prior)
posterior = prior + counts
def score_idx(idx):
prior_a = prior[idx]
prior_b = sum(prior) - prior_a
posterior_a = posterior[idx]
posterior_b = sum(posterior) - posterior_a
return beta_enriched((prior_a, prior_b), (posterior_a, posterior_b))
return [score_idx(i) for i in range(K)]
def logit(p):
return log2(p) - log2(1-p)
if __name__ == '__main__':
from polyafit import fit_to_data_infile
alpha, converged, wellnums, wellcounts = fit_to_data_infile('PBcounts.txt')
print "Fit alpha:", alpha, "\tconverged:", converged
for idx, wellnum in enumerate(wellnums):
print wellnum, "\t", "\t".join([str(logit(v)) for v in score(alpha, wellcounts[idx])]), "\t", "\t".join([str(v) for v in wellcounts[idx]])
| gpl-2.0 | -7,962,094,248,256,210,000 | 39.754717 | 146 | 0.572685 | false |
leathersole/midi-accordion-fancy | src/main/player.py | 1 | 1967 | #!/bin/env python
import pygame.midi
import soundvalue
class Player:
def __init__(self):
instrument = 22
port = 2
self.button2sound = {'a':(60,61), 's':(62,63), 'd':(64,65) }
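        # key -> (primary MIDI note, alternate note); e.g. 'a' maps to 60,
        # middle C. Only the first note of each pair is used by play()/stop().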
self.buttons = self.button2sound.keys()
self.volume = 127
pygame.midi.init()
self.midiOutput = pygame.midi.Output(port, 0)
self.midiOutput.set_instrument(instrument)
self.currently_playing = {k : False for k in self.button2sound.iterkeys()}
def play(self, key):
note = self.button2sound[key][0]
self.midiOutput.note_on(note,self.volume)
self.currently_playing[key] = True
def stop(self, key):
note = self.button2sound[key][0]
self.midiOutput.note_off(note,self.volume)
self.currently_playing[key] = False
def quit(self):
pygame.midi.quit()
if __name__ == "__main__":
p = Player()
print(p.button2sound)
#import pygame
#import pygame.midi
#
#pygame.init()
#pygame.midi.init()
#
#pygame.display.set_mode((640,480))
#
#instrument = 22
#note = 74
#volume = 127
#port = 2
#
#midiOutput = pygame.midi.Output(port, 0)
#midiOutput.set_instrument(instrument)
#
#finished = False
#
#key2sound = {'a':60, 's':62, 'd':64 }
#
#print "Press q to quit..."
#currently_playing = {k : False for k in key2sound.iterkeys()}
#
#while not finished:
#
# event = pygame.event.wait()
#
# if event.type == pygame.QUIT:
# finished = True
# elif event.type in (pygame.KEYDOWN,pygame.KEYUP):
# key = pygame.key.name(event.key)
# if key == 'q':
# finished = True
#
# if key in key2sound:
# if event.type == pygame.KEYDOWN:
# note = key2sound[key]
# midiOutput.note_on(note,volume)
# currently_playing[key] = True
# elif event.type == pygame.KEYUP:
# midiOutput.note_off(note,volume)
# currently_playing[key] = False
#
#del midiOutput
#pygame.midi.quit()
#
#print "-- END --"
| gpl-3.0 | 1,122,424,234,700,175,700 | 21.352273 | 82 | 0.605999 | false |
tristan-c/massive-octo-tribble | massive/views.py | 1 | 3540 | import uuid
import os
from flask_restful import Resource, reqparse
from flask_login import login_required
from flask import redirect, send_file, g, url_for
from massive import api, app
from massive.models import *
from massive.utils import *
from io import BytesIO
# Shadow flask_restful's Resource so every endpoint requires login by default
class Resource(Resource):
    method_decorators = [login_required]
@app.route('/')
def index():
if g.user is not None and g.user.is_authenticated:
return redirect('/index.html')
else:
return redirect("/login")
parser = reqparse.RequestParser()
parser.add_argument('url', type=str)
parser.add_argument('tags', type=str, default=None)
class links(Resource):
def get(self):
user = User.query.get(g.user.id)
links = Link.query.join(User).filter(User.email == user.email)
return [link.dump() for link in links]
def post(self, linkId=None):
args = parser.parse_args()
user = User.query.get(g.user.id)
if linkId:
            link = Link.query.get(linkId)
if not link:
return "no link found", 404
#taglist = [t.name for t in link.tags]
# for tag in args['tags']:
# if tag not in taglist:
# db_tag = Tags.get(name=tag)
# if not db_tag:
# db_tag = Tags(name=tag)
# link.tags.add(db_tag)
return link.dump()
url = args['url']
tags = args['tags']
#prepend if no protocole specified
if url.find("http://") == -1 and url.find("https://") == -1:
url = "http://%s" % url
if Link.query.filter_by(url=url, user_id=user.id).first():
return "already in db", 400
if tags:
tags = tags.split(",")
link = save_link(
get_page_title(url),
url,
tags,
user
)
return link.dump()
def delete(self, linkId=None):
        if linkId is None:
return "no link provided", 400
link = Link.query.get(linkId)
if not link:
return "no link found", 404
#delete favicon
if link.favicon:
favicon_path = os.path.join(app.config['FAVICON_REPO'],link.favicon)
try:
os.remove(favicon_path)
except Exception as e:
app.logger.warning("error while trying to remove a favicon")
app.logger.warning(e)
db.session.delete(link)
db.session.commit()
return ""
api.add_resource(links, '/links', '/links/<string:linkId>')
@app.route('/ico/<icoId>')
def get_avatar(icoId=None):
file_path = os.path.join(app.config['FAVICON_REPO'],icoId)
if os.path.isfile(file_path):
return send_file(file_path, as_attachment=True)
else:
return "no favicon found",404
def save_link(title, url, tags=[], user=None):
if not title:
title = url.split('/')[-1]
iconfile_name = "%s.ico" % str(uuid.uuid4())
favicon = get_page_favicon(url,iconfile_name)
link = Link(
title=title,
url=url,
favicon=iconfile_name,
#tags=db_tags,
user=user
)
for tag in tags:
db_tag = Tags.query.filter_by(name=tag).first()
if not db_tag:
db_tag = Tags(name=tag)
db.session.add(db_tag)
link.tags.append(db_tag)
if favicon:
link.favicon = favicon
db.session.add(link)
db.session.commit()
return link
| bsd-2-clause | 8,048,658,675,714,831,000 | 24.467626 | 80 | 0.554237 | false |
branden/dcos | packages/dcos-net/extra/dcos-net-setup.py | 1 | 1380 | #!/opt/mesosphere/bin/python
"""
The script makes it safe to add network interfaces and IP addresses repeatedly:
the ip command exits with code 2 when the interface or address already exists [1],
while dcos-net-setup.py checks the ip command's output and returns a success exit code [2].
[1] ExecStartPre=-/usr/bin/ip link add name type dummy
[2] ExecStartPre=/path/dcos-net-setup.py ip link add name type dummy
The script also prevents duplicate iptables rules [3]
[3] ExecStartPre=/path/dcos-net-setup.py iptables --wait -A FORWARD -j ACCEPT
"""
import subprocess
import sys
def main():
if sys.argv[1:4] in [['ip', 'link', 'add'], ['ip', 'addr', 'add'], ['ip', '-6', 'addr']]:
result = subprocess.run(sys.argv[1:], stderr=subprocess.PIPE)
sys.stderr.buffer.write(result.stderr)
if result.stderr.strip().endswith(b'File exists'):
result.returncode = 0
elif sys.argv[1] == 'iptables':
# check whether a rule matching the specification does exist
argv = ['-C' if arg in ['-A', '-I'] else arg for arg in sys.argv[1:]]
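        # e.g. ['iptables', '--wait', '-A', 'FORWARD', '-j', 'ACCEPT']
        #   -> ['iptables', '--wait', '-C', 'FORWARD', '-j', 'ACCEPT']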
result = subprocess.run(argv)
if result.returncode != 0:
            # if it doesn't exist, append or insert the rule
result = subprocess.run(sys.argv[1:])
else:
result = subprocess.run(sys.argv[1:])
sys.exit(result.returncode)
if __name__ == "__main__":
main()
| apache-2.0 | -2,490,736,207,287,210,000 | 34.384615 | 93 | 0.65 | false |
itsMichael/uair-pro | app/launcher.py | 1 | 19189 | #! /usr/bin/env python2
# -*- coding: utf-8 -*-
import os
import sys
import random
import socket
import subprocess
import signal
import hashlib
from functions import lt
from langs import langs
from loaders import load_config, save_config
ROOT_PATH=os.path.dirname(__file__)
ICON_PATH=os.path.join(ROOT_PATH, "static/launcher.png")
PIDFILE_PATH=os.path.expanduser("~/.uair.pid")
DEFAULT_IMAGES_PATH=os.path.expanduser("~/Pictures")
try:
import gtk
CLIMODE=False
if len(sys.argv)>1 and sys.argv[1]=="cli":
CLIMODE=True
sys.argv=sys.argv[1:]
except:
CLIMODE=True
def check_pidfile():
if os.path.exists(PIDFILE_PATH):
return int(open(PIDFILE_PATH).read())
else:
return False
def create_pidfile(pid):
ff=open(PIDFILE_PATH, "w")
ff.write(str(pid))
ff.close()
def delete_pidfile():
if check_pidfile():
os.remove(PIDFILE_PATH)
return True
else:
return False
def remove_orphaned_pidfile(pid):
if not os.path.exists("/proc/%s" % str(pid)):
result=delete_pidfile()
if result:
print("Removed orphaned pid file")
def generate_password(length=5):
alphabet = "abcdefghijkmnoprstuwxyz1234567890"
    pwd = ''
    # random.choice returns a single character, so one draw per position suffices
    for count in range(length):
        pwd += random.choice(alphabet)
    return pwd
def hash_password(password):
from main import SECRET_KEY
#hash password
hashed=hashlib.md5()
hashed.update(SECRET_KEY+password)
return hashed.hexdigest()
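# Sketch of the credential flow used at startup below (values illustrative):
#   plain = generate_password()                       # e.g. "k3v9x"
#   config["gen_password"] = hash_password(plain)     # md5(SECRET_KEY + plain)
# The plain password is printed once for the user; only the hash is persisted.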
def get_local_ip_address():
try:
s = socket.socket()
s.connect(('google.com', 80))
ip=s.getsockname()[0]
except:
ip=""
if ip:
return ip
else:
return "127.0.0.1"
def get_global_ip_address():
import urllib2
try:
ip=urllib2.urlopen('http://icanhazip.com').read()
ip=ip.strip()
except:
ip=""
if ip:
return ip
else:
return "127.0.0.1"
def start_server(config):
#Dont start server when is running
if check_pidfile():
print("Server already started.")
return
#create server path
path=os.path.join(ROOT_PATH, "main.pyc")
#Start server
cmd=["nohup", "python", path,"launch"]
server=subprocess.Popen(cmd)
    #create pid file (the GUI disables the start button separately)
create_pidfile(server.pid)
return server
def stop_server(config):
#stop www server by sending SIGINT signal
pid=check_pidfile()
if pid:
#remove pid file
delete_pidfile()
#Kill process
try:
os.kill(pid, signal.SIGTERM)
except OSError:pass
print("Web server stopped")
ddd = subprocess.Popen("/usr/bin/notify-send Server Stopped", shell=True)
ddd.poll()
return True
else:
print("Server not started.")
return False
######################
# CLI Launcher
######################
if CLIMODE:
    #exit immediately when imported rather than run as a script
if not __name__=="__main__":
exit(0)
    #show usage when no command is given
if len(sys.argv)<2:
print("Usage: sudo python launcher.pyc start/stop")
print("launcher.pyc password <newpassword>")
print("launcher.pyc port <new port>")
exit(0)
    #reject unknown commands
if sys.argv[1] not in ["start", "stop", "password", "port"]:
print("Invalid command")
exit(0)
#remove pid
remove_orphaned_pidfile(check_pidfile())
#load config
try:
config=load_config()
except: pass
if sys.argv[1]=="start":
if check_pidfile():
print("Server already started.")
exit(0)
gip=get_global_ip_address()
lip=get_local_ip_address()
#print addresses
print("Local IP: %s" % lip)
print("Public IP: %s" % gip)
#gen passwords
pass1=generate_password()
pass2=generate_password()
config["gen_password"]=hash_password(pass1)
config["gen_password_shared"]=hash_password(pass2)
print("Login password:%s" % pass1)
print("Shared password:%s" % pass2)
config["local_ip"]=lip
config["global_ip"]=gip
save_config(config)
start_server(config)
config["status"]=1
save_config(config)
if sys.argv[1]=="stop":
done=stop_server(config)
if done:
config["status"]=0
save_config(config)
if sys.argv[1]=="password" and len(sys.argv)>2:
config["password"]=hash_password(sys.argv[2].strip())
save_config(config)
print("New password set")
if sys.argv[1]=="port":
if len(sys.argv)>2:
try:
config["port"]=int(sys.argv[2].strip())
save_config(config)
print("Port set to %s "% int(sys.argv[2].strip()))
except:pass
else:
print("Current port: %s "% config["port"])
#exit
exit(0)
class MainWindow(gtk.Window):
def __init__(self):
gtk.Window.__init__(self)
self.set_title("U-Air Launcher")
self.set_icon_from_file(ICON_PATH)
self.set_resizable(False)
self.set_size_request(440, 320)
self.set_border_width(20)
self.set_position(gtk.WIN_POS_CENTER)
#load config
self.config=load_config()
save_config(self.config)
#www server process
self.server=None
#get lang from config
self.lang=self.config.get("lang", "en")
#connect close event
self.connect("destroy", self.close_app)
self.fixed = gtk.Fixed()
self.label_status = gtk.Label("Status:")
self.label_status.set_text(lt("Status", self.lang)+":")
#local IP label
self.label_local_ip = gtk.Label("Local IP:")
label=lt("Local IP", self.lang)+": "+self.config["local_ip"]
label+=":"+str(self.config["port"])
self.label_local_ip.set_text(label)
self.label_public_ip = gtk.Label("Public IP:")
label=lt("Public IP", self.lang)+": "+self.config["global_ip"]+":"+str(self.config["port"])
self.label_public_ip.set_text(label)
self.label_gen_password = gtk.Label("Login password:")
self.label_gen_password.set_text(lt("Login password", self.lang)+":")
self.label_gen_password_shared = gtk.Label("Shared password:")
self.label_gen_password_shared.set_text(lt("Shared password", self.lang)+":")
self.label_set_gen_password = gtk.Label("...")
self.label_set_gen_password_shared = gtk.Label("...")
self.button_regenerate = gtk.Button("Regenerate password")
self.button_regenerate.set_label(lt("Regenerate password", self.lang))
self.button_regenerate.connect("clicked", self.regenerate)
self.button_start = gtk.Button("Start")
self.button_start.set_label(lt("Start", self.lang))
self.button_start.connect("clicked", self.start)
self.button_start.set_size_request(110, 50)
self.button_stop = gtk.Button("Stop")
self.button_stop.set_label(lt("Stop", self.lang))
self.button_stop.connect("clicked", self.stop)
self.button_stop.set_size_request(110, 50)
self.button_options = gtk.Button("Options")
self.button_options.set_label(lt("Options", self.lang))
self.button_options.set_size_request(130, 30)
self.button_options.connect("clicked", self.show_option_window)
self.button_about = gtk.Button("About")
self.button_about.set_label(lt("About", self.lang))
self.button_about.set_size_request(130, 30)
self.button_about.connect("clicked", self.show_about_window)
self.button_quit = gtk.Button("Quit")
self.button_quit.set_label(lt("Quit", self.lang))
self.button_quit.set_size_request(130, 30)
self.button_quit.connect("clicked", self.close_app)
self.img_banner = gtk.Image()
self.img_banner.set_from_file(os.path.join(ROOT_PATH,
"static/banner1.png"))
self.fixed.put(self.img_banner, 0, 0)
self.fixed.put(self.label_status, 5, 5)
#self.fixed.put(self.label_local_ip, 3, 130)
#self.fixed.put(self.label_public_ip, 200 ,130)
self.fixed.put(self.label_local_ip, 5, 110)
self.fixed.put(self.label_public_ip, 5 ,130)
self.fixed.put(self.button_regenerate, 70, 200)
self.fixed.put(self.button_start, 0, 230)
self.fixed.put(self.button_stop, 120, 230)
self.fixed.put(self.label_gen_password, 0, 160)
self.fixed.put(self.label_set_gen_password, 150, 160)
self.fixed.put(self.label_gen_password_shared, 0, 180)
self.fixed.put(self.label_set_gen_password_shared, 150, 180)
self.fixed.put(self.button_options, 250, 170)
self.fixed.put(self.button_about, 250, 210)
self.fixed.put(self.button_quit, 250, 250)
self.add(self.fixed)
#show all
self.show_all()
#create pictures folder if not exist
if not os.path.exists(DEFAULT_IMAGES_PATH):
os.mkdir(DEFAULT_IMAGES_PATH)
#remove pid file when process not exist
remove_orphaned_pidfile(check_pidfile())
#set status
self.setstatus()
#update start stop buttons
self.toggle_start_stop_buttons()
#generate new login password
self.gen_login_password()
self.gen_shared_password()
    def regenerate(self, widget, data=None):
self.gen_login_password()
self.gen_shared_password()
def setstatus(self):
self.config["status"]=bool(check_pidfile())
#get and save global ip
gip=get_global_ip_address()
self.config["global_ip"]=gip
self.label_public_ip.set_text(lt("Public IP", self.lang)+": "+\
self.config["global_ip"]+":"+str(self.config["port"]))
# get and save local IP
lip=get_local_ip_address()
self.config["local_ip"]=lip
self.label_local_ip.set_text(lt("Local IP", self.lang)+": "+\
self.config["local_ip"]+":"+str(self.config["port"]))
if self.config["status"]:
self.label_status.set_text("Status: "+lt("Online", self.lang))
else:
self.label_status.set_text("Status: "+lt("Offline", self.lang))
def toggle_start_stop_buttons(self):
serverpid=check_pidfile()
if serverpid:
self.button_start.set_sensitive(False)
self.button_stop.set_sensitive(True)
else:
self.button_start.set_sensitive(True)
self.button_stop.set_sensitive(False)
def start(self, widget, data=None):
"""Start web server"""
if self.server:return
serv=start_server(self.config)
if serv:
self.server=serv
self.toggle_start_stop_buttons()
#set status
self.setstatus()
save_config(self.config)
print("Web server started")
ddd = subprocess.Popen("/usr/bin/notify-send Server Started", shell=True)
ddd.poll()
def stop(self, widget, data=None):
"""Stop web server"""
if stop_server(self.config):
self.server=None
self.setstatus()
self.toggle_start_stop_buttons()
#save config
save_config(self.config)
def close_app(self, widget, data=None):
exit(0)
def show_option_window(self, widget, data=None):
OptionWindow(self.config)
def show_about_window(self, widget, data=None):
AboutWindow(self.config)
def gen_login_password(self):
pwd=generate_password()
hpwd=hash_password(pwd)
self.config["gen_password"] = hpwd
save_config(self.config)
#set text for widget with password
self.label_set_gen_password.set_text(pwd)
def gen_shared_password(self):
pwd=generate_password()
hpwd=hash_password(pwd)
self.config["gen_password_shared"] = hpwd
save_config(self.config)
#set text for widget with password
self.label_set_gen_password_shared.set_text(pwd)
class OptionWindow(gtk.Window):
def __init__(self, config):
self.config=config
self.lang=self.config.get("lang", "en")
gtk.Window.__init__(self)
self.set_title(lt("Options", self.lang))
self.set_resizable(False)
self.set_size_request(300, 250)
self.set_border_width(20)
self.set_position(gtk.WIN_POS_CENTER)
self.connect("destroy", self.close_window)
self.fixed = gtk.Fixed()
self.label_set_pass = gtk.Label("Password:")
self.label_set_pass.set_text(lt("Password", self.lang)+":")
self.entry_set_pass = gtk.Entry()
self.label_startup = gtk.Label("Load in startup Ubuntu")
self.label_startup.set_text(lt("Load in startup Ubuntu", self.lang))
self.check_startup = gtk.CheckButton()
self.check_startup.set_active(self.config["startup"])
self.check_startup.connect("toggled", self.entry_checkbox)
self.label_set_port = gtk.Label("Port:")
self.label_set_port.set_text(lt("Port", self.lang)+":")
self.entry_set_port = gtk.Entry()
self.entry_set_port.set_text(str(self.config["port"]))
self.label_choose_image = gtk.Label("Choose images folder")
self.label_choose_image.set_text(lt("Choose images folder", self.lang)+":")
self.chooser_image_folder = \
gtk.FileChooserButton(lt("Choose images folder", self.lang))
self.chooser_image_folder.set_action(
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER
)
self.chooser_image_folder.set_size_request(150,35)
self.label_set_language = gtk.Label("Set language:")
self.label_set_language.set_text(lt("Set language", self.lang)+":")
self.combo_language = gtk.combo_box_new_text()
#add languages
for lang in langs:
self.combo_language.append_text(lang)
self.combo_language.set_active(langs.keys().index(\
self.config["lang"]))
self.combo_language.connect("changed", self.select_language)
#get images path
imgpath=self.config.get("images_path", \
os.path.expanduser("~/Pictures"))
self.chooser_image_folder.set_filename(imgpath)
self.button_save = gtk.Button("Save")
self.button_save.set_size_request(130, 30)
self.button_save.connect("clicked", self.onsave)
self.fixed.put(self.label_set_pass, 10, 5)
self.fixed.put(self.entry_set_pass, 90, 0)
#self.fixed.put(self.check_startup, 5, 42)
#self.fixed.put(self.label_startup, 40, 44)
self.fixed.put(self.label_set_port, 10, 44)
self.fixed.put(self.entry_set_port, 90 ,42)
self.fixed.put(self.label_choose_image, 10, 90)
self.fixed.put(self.chooser_image_folder, 10, 110)
self.fixed.put(self.label_set_language, 10, 150)
self.fixed.put(self.combo_language, 10, 170)
self.fixed.put(self.button_save, 120, 170)
self.add(self.fixed)
self.show_all()
def select_language(self, combo_language, data=None):
model = self.combo_language.get_model()
index = self.combo_language.get_active()
self.config["lang"]=langs.keys()[index]
dlg=gtk.MessageDialog(self, gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_INFO, gtk.BUTTONS_CLOSE,
"Launcher restart required.")
dlg.run()
dlg.destroy()
def onsave(self, widget, data=None):
"""Save options configuration"""
        passwd = self.entry_set_pass.get_text()
        check = self.check_startup.get_active()
        port = self.entry_set_port.get_text()
        if passwd:
            # reuse the module-level helper instead of duplicating the md5 logic
            self.config["password"] = hash_password(passwd)
self.config["startup"] = check
self.config["port"] = int(port)
self.config["images_path"] = self.chooser_image_folder.get_filename()
save_config(self.config)
self.destroy()
def add_startup(self, data=None):
        path = os.path.expanduser("~/.xinitrc")
        if not os.path.isfile(path):
            open(path, 'w').write("/opt/uair/bin/uairlauncher start")
        else:
            open(path, 'a').write("/opt/uair/bin/uairlauncher start")
def del_startup(self, data=None):
try:
            # '~' is not expanded by open(); use expanduser, and rewrite the
            # file in place instead of appending duplicated lines
            path = os.path.expanduser("~/.xinitrc")
            lines = open(path).readlines()
            target = open(path, 'w')
            for line in lines:
                target.write(line.replace("/opt/uair/bin/uairlauncher start", ""))
            target.close()
except:
pass
def entry_checkbox(self, widget):
global b_entry_checkbox
b_entry_checkbox = self.check_startup.get_active()
if b_entry_checkbox:
self.add_startup()
else:
self.del_startup()
return
def close_window(self, widget, data=None):
self.destroy()
class AboutWindow(gtk.Window):
def __init__(self, config):
self.config=config
self.lang=self.config.get("lang", "en")
gtk.Window.__init__(self)
self.set_resizable(False)
self.set_title(lt("About", self.lang))
self.set_size_request(540, 250)
self.set_border_width(20)
self.set_position(gtk.WIN_POS_CENTER)
self.connect("destroy", self.close_window)
self.fixed = gtk.Fixed()
self.label_about = gtk.Label(lt("U-Air\n\
---------------------\n\
U-Air allow you to browse upload and download your files, wherever you are.\n\
You forget take any file from home to your friend ? Now its not problem.\n\
You can easly browse your files, upload new and listen MP3 songs.", self.lang))
self.label_authors=gtk.Label("Authors:")
self.label_authors.set_text(lt("Authors", self.lang)+":")
author1="Michal Rosiak <[email protected]>"
author2="Marcin Swierczynski <[email protected]>"
self.label_autor1 = gtk.Label(author1)
self.label_autor2 = gtk.Label(author2)
self.fixed.put(self.label_about, 5, 10)
self.fixed.put(self.label_authors, 5, 120)
self.fixed.put(self.label_autor1, 10, 150)
self.fixed.put(self.label_autor2, 10, 180)
self.add(self.fixed)
self.show_all()
def close_window(self, widget, data=None):
self.destroy()
#class HelpConsole():
# def __init__(self):
# print "Help Console:\n"
# print "start_console - Start web server in console"
if __name__ == "__main__":
main=MainWindow()
#get arguments
if "start" in sys.argv:
main.hide()
main.start(None)
main.close_app(None)
elif "stop" in sys.argv:
main.hide()
main.stop(None)
main.close_app(None)
try:
gtk.main()
except:
import traceback
traceback.print_exc()
main.stop(None)
| gpl-3.0 | 3,870,659,540,756,294,000 | 31.196309 | 99 | 0.597738 | false |
Maescool/libk8000 | setup.py | 1 | 1727 | #!/usr/bin/env python
#
# k8000 install script
#
# See COPYING for info about the license (GNU GPL)
# Check AUTHORS to see who wrote this software.
from distutils.core import setup
from distutils.extension import Extension
import sys, glob, re, os
# Check for Python < 2.2
if sys.version < '2.2':
sys.exit('Error: Python-2.2 or newer is required. Current version:\n %s'
% sys.version)
authors = [ ('Pieter Maes', '[email protected]') ]
lname = max([len(author[0]) for author in authors])
__author__ = '\n'.join(['%s <%s>' % (author[0].ljust(lname), author[1])
for author in authors])
short = 'Connection library to Velleman Kit K8000'
long = '''\
This is a connection Library to the Velleman K8000 Input/Output interface card.'''
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Input/output :: Driver' ]
py_version='python%d.%d' % (sys.version_info[0],sys.version_info[1])
incl_dir = [ os.path.join(sys.prefix,'include',py_version), os.curdir ]
setup(name='pyk8000',
version='0.1',
description=short,
long_description=long,
classifiers=classifiers,
author=', '.join([author[0] for author in authors]),
author_email=', '.join([author[1] for author in authors]),
url='http://github.com/Maescool/libk8000',
ext_modules = [Extension('_k8000', sources=['k8000.c','k8000.i'],swig_opts=['-shadow'])],
py_modules = ["k8000"],
license='GPL'
)
| gpl-2.0 | 6,887,290,133,431,512,000 | 33.54 | 95 | 0.643312 | false |
ubyssey/dispatch | dispatch/tests/test_widget_register.py | 1 | 2506 | from dispatch.theme.widgets import Zone, Widget
from dispatch.theme.fields import CharField, TextField, ArticleField, ImageField, WidgetField, Field
from dispatch.theme import register
from dispatch.tests.cases import DispatchAPITestCase, DispatchMediaTestMixin
from dispatch.theme.exceptions import InvalidWidget, InvalidZone
class TestZone(Zone):
id = 'test-zone'
name = 'Test zone'
class TestZoneNoID(Zone):
name = 'Test zone'
class TestZoneNoName(Zone):
id = 'test-zone'
class TestWidgetNoID(Widget):
name = 'Test widget'
template = 'widgets/test-widget.html'
zones = [TestZone]
class TestWidgetNoName(Widget):
id = 'test-widget'
template = 'widgets/test-widget.html'
zones = [TestZone]
class TestWidgetNoTemplate(Widget):
id = 'test-widget'
name = 'Test widget'
zones = [TestZone]
class TestWidgetNoZone(Widget):
id = 'test-widget'
name = 'Test widget'
template = 'widgets/test-widget.html'
class WidgetAttributeTest(DispatchAPITestCase, DispatchMediaTestMixin):
def test_validate_missing_widget_attributes(self):
"""Validating widgets with various missing attributes should result in errors"""
try:
register.widget(TestWidgetNoID)
            self.fail("%s must contain a valid 'id' attribute" % TestWidgetNoID.__name__)
except InvalidWidget:
pass
try:
register.widget(TestWidgetNoName)
            self.fail("%s must contain a valid 'name' attribute" % TestWidgetNoName.__name__)
except InvalidWidget:
pass
try:
register.widget(TestWidgetNoTemplate)
            self.fail("%s must contain a valid 'template' attribute" % TestWidgetNoTemplate.__name__)
except InvalidWidget:
pass
try:
register.widget(TestWidgetNoZone)
            self.fail("%s must contain a valid 'zones' attribute" % TestWidgetNoZone.__name__)
except InvalidWidget:
pass
class ZoneAttributeTest(DispatchAPITestCase, DispatchMediaTestMixin):
def test_validate_missing_zone_attributes(self):
"""Validating zone with various missing attributes should result in errors"""
try:
register.zone(TestZoneNoID)
            self.fail("%s must contain a valid 'id' attribute" % TestZoneNoID.__name__)
        except InvalidZone:
pass
try:
register.zone(TestZoneNoName)
            self.fail("%s must contain a valid 'name' attribute" % TestZoneNoName.__name__)
        except InvalidZone:
pass
| gpl-2.0 | -8,221,618,396,399,884,000 | 29.938272 | 100 | 0.650439 | false |
truevision/django_banklink | django_banklink/views.py | 1 | 2422 | from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django_banklink.forms import PaymentRequest
from django_banklink.utils import verify_signature
from django_banklink.models import Transaction
from django_banklink.signals import transaction_succeeded
from django_banklink.signals import transaction_failed
@csrf_exempt
def response(request):
if request.method == 'POST':
data = request.POST
else:
data = request.GET
if 'VK_MAC' not in data:
raise Http404("VK_MAC not in request")
signature_valid = verify_signature(data, data['VK_MAC'])
if not signature_valid:
raise Http404("Invalid signature. ")
transaction = get_object_or_404(Transaction, pk = data['VK_REF'])
if data['VK_AUTO'] == 'Y':
transaction.status = 'C'
transaction.save()
transaction_succeeded.send(Transaction, transaction = transaction)
        return HttpResponse("request handled, swedbank")
else:
if data['VK_SERVICE'] == '1901':
url = transaction.redirect_on_failure
transaction.status = 'F'
transaction.save()
transaction_failed.send(Transaction, transaction = transaction)
else:
url = transaction.redirect_after_success
return HttpResponseRedirect(url)
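# Illustrative bank callback payload (field values made up; the VK_* names
# follow the banklink spec handled above):
#   {'VK_SERVICE': '1101', 'VK_AUTO': 'Y', 'VK_REF': '<Transaction pk>',
#    'VK_MAC': '<base64 signature over the other fields>'}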
def request(request, description, message, amount, currency, redirect_to):
if 'HTTP_HOST' not in request.META:
raise Http404("HTTP/1.1 protocol only, specify Host header")
protocol = 'https' if request.is_secure() else 'http'
url = '%s://%s%s' % (protocol, request.META['HTTP_HOST'], reverse(response))
context = RequestContext(request)
user = None if request.user.is_anonymous() else request.user
context['form'] = PaymentRequest(description = description,
amount = amount,
currency = currency,
redirect_to = redirect_to,
message = message,
user = user)
return render_to_response("django_banklink/request.html", context)
| bsd-3-clause | 8,737,867,333,097,925,000 | 41.491228 | 80 | 0.650702 | false |
denniskempin/safetynet | safetynet_tests.py | 1 | 7873 | from collections import OrderedDict
import unittest
from safetynet import (Dict, InterfaceMeta, List, Optional, Tuple,
_ValidateValue, typecheck)
class CustomType(object):
pass
class CustomSubType(CustomType):
pass
def DefineTypeCheckExample():
"""Defines an example class.
Note that we are doing this in a function. Some errors detected by
InterfaceMeta are thrown at the time of definition. We want to catch those
in the tests.
"""
class TypeCheckExample(object):
__metaclass__ = InterfaceMeta
def docstring_example(self, a, b, c, d, e, return_):
""" Docstring
:type a: CustomType
:type b: List[int]
:type c: Dict[str, int]
:type d: callable
:type e: Optional[int]
:rtype: int
"""
return return_
@typecheck(a=CustomType, b=List[int], c=Dict[str, int],
d=callable, e=Optional[int], returns=int)
def annotation_example(self, a, b, c, d, e, return_):
return return_
@typecheck(a="TypeCheckExample")
def self_reference_example(self, a):
pass
def self_reference_example2(self, a):
""" Docstring
:type a: TypeCheckExample
"""
return TypeCheckExample
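# The class is built inside a function so definition-time errors raised by
# InterfaceMeta can be asserted from the tests, e.g. (sketch):
#   TypeCheckExample = DefineTypeCheckExample()
#   instance = TypeCheckExample()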
class TypeCheckTests(unittest.TestCase):
def assert_correct_example_type_checks(self, function):
""" Assumes function has the following type checks and tests them.
a: CustomType
b: List[int]
c: Dict[str, int]
d: callable
e: Optional[int]
return value: int
The function should return the argument: return_
"""
def get_args(kwargs):
defaults = [("a", CustomType()), ("b", []), ("c", {}),
("d", lambda: None), ("e", 1), ("return_", 1)]
args = OrderedDict(defaults)
args.update(kwargs)
return args
def assert_success(**kwargs):
args = get_args(kwargs)
function(*args.values())
function(**args)
def assert_failure(**kwargs):
args = get_args(kwargs)
self.assertRaises(TypeError, function, *args.values())
self.assertRaises(TypeError, function, **args)
# Test CustomType
assert_success(a=CustomSubType())
assert_failure(a=1)
assert_failure(a=None)
# Test List[int]
assert_success(b=[1, 2, 3])
assert_failure(b=[1.0])
assert_failure(b=[1, None, 3])
assert_failure(b=None)
# Test Dict[str, int]
assert_success(c={"key": 1})
assert_failure(c={"key": None})
assert_failure(c={"key": "1"})
assert_failure(c={1: 1})
assert_failure(c=None)
# Test callable
def dummy():
pass
assert_success(d=lambda a, b, c: None)
assert_success(d=dummy)
assert_success(d=CustomType)
assert_failure(d=None)
assert_failure(d=1)
# Test Optional[int]
assert_success(e=1)
assert_success(e=None)
assert_failure(e=1.0)
# Test return value
assert_failure(return_=1.0)
assert_failure(return_=None)
def test_type_annotation(self):
@typecheck(a=CustomType, b=List[int], c=Dict[str, int],
d=callable, e=Optional[int], returns=int)
def test_function(a, b, c, d, e, return_):
return return_
self.assert_correct_example_type_checks(test_function)
def test_string_type_annotation(self):
@typecheck(a="CustomType", b="List[int]", c="Dict[str, int]",
d="callable", e="Optional[int]", returns="int")
def test_function(a, b, c, d, e, return_):
return return_
self.assert_correct_example_type_checks(test_function)
def test_docstring_param_annotation(self):
@typecheck
def test_function(a, b, c, d, e, return_):
""" Docstring
:param CustomType a: description
:param List[int] b: description
:param Dict[str, int] c
:param callable d: description
:param Optional[int] e
:returns int: description
"""
return return_
self.assert_correct_example_type_checks(test_function)
def test_docstring_type_annotation(self):
@typecheck
def test_function(a, b, c, d, e, return_):
""" Docstring
:type a: CustomType
:type b: List[int]
:type c: Dict[str, int]
:type d: callable
:type e: Optional[int]
:rtype: int
"""
return return_
self.assert_correct_example_type_checks(test_function)
def test_class_docstring_annotation(self):
instance = DefineTypeCheckExample()()
self.assert_correct_example_type_checks(instance.docstring_example)
def test_class_type_annotation(self):
instance = DefineTypeCheckExample()()
self.assert_correct_example_type_checks(instance.annotation_example)
def test_class_self_reference(self):
instance = DefineTypeCheckExample()()
instance.self_reference_example(instance)
instance.self_reference_example2(instance)
self.assertRaises(TypeError, instance.self_reference_example, None)
self.assertRaises(TypeError, instance.self_reference_example2, None)
def test_class_inheritance_typechecks(self):
class InheritedExample(DefineTypeCheckExample()):
pass
instance = InheritedExample()
self.assert_correct_example_type_checks(instance.docstring_example)
self.assert_correct_example_type_checks(instance.annotation_example)
def test_class_override_typechecks(self):
class OverrideExample(DefineTypeCheckExample()):
def docstring_example(self, a, b, c, d, e, return_):
return return_
instance = OverrideExample()
self.assert_correct_example_type_checks(instance.docstring_example)
self.assert_correct_example_type_checks(instance.annotation_example)
def test_compile_time_type_errors(self):
self.assertRaises(TypeError, typecheck, (), dict(a="UnknownName"))
def test_docstring_preserved(self):
@typecheck
def test_function():
"docstring"
self.assertEqual(test_function.__doc__, "docstring")
def test_return_value_preserved(self):
@typecheck
def test_function():
return 42
self.assertEqual(test_function(), 42)
def test_class_override_arguments_check(self):
def DefineClass():
class OverrideExample(DefineTypeCheckExample()):
def docstring_example(self, a, b, c, CHANGES, e, return_):
return return_
self.assertRaises(TypeError, DefineClass)
def test_class_override_public_check(self):
def DefineClass():
class OverrideExample(DefineTypeCheckExample()):
def public_method_not_defined_in_interface(self):
pass
self.assertRaises(TypeError, DefineClass)
def test_object_init_check(self):
class Example(object):
__metaclass__ = InterfaceMeta
def __init__(self, a, b):
"""
:param bool a:
:param str b:
"""
Example(False, "str")
self.assertRaises(TypeError, lambda: Example(True, 1))
self.assertRaises(TypeError, lambda: Example(1, "str"))
def test_property_check(self):
class Example(object):
__metaclass__ = InterfaceMeta
@property
def valid(self):
"""
:returns int:
"""
return 42
@property
def invalid(self):
"""
:returns int:
"""
return "42"
instance = Example()
self.assertEqual(instance.valid, 42)
self.assertRaises(TypeError, lambda: instance.invalid)
def test_class_variables_untouched(self):
class VariablesExample:
__metaclass__ = InterfaceMeta
variable = 1
self.assertEqual(VariablesExample.variable, 1)
def test_tuple_check(self):
self.assertTrue(_ValidateValue((1, "str"), Tuple[int, str]))
self.assertTrue(_ValidateValue(None, Optional[Tuple[int, str]]))
self.assertFalse(_ValidateValue(("1", "str"), Tuple[int, str]))
self.assertFalse(_ValidateValue((1, "str", 3), Tuple[int, str]))
self.assertFalse(_ValidateValue((), Tuple[int, str]))
| mit | -4,629,023,588,682,049,000 | 28.82197 | 76 | 0.644608 | false |
cmazuc/scarfage | scarf/user.py | 1 | 5046 | import re
import string
from scarf import app
from flask import redirect, url_for, render_template, session, request, flash
from core import redirect_back, SiteUser, NoUser, new_user, AuthFail, check_email
from main import PageData
import logging
import config
app.secret_key = config.SECRETKEY
BANNED = ['.*\.pl$',
'.*\.ru$',
'.*\.ovh$',
'.*servicesp.bid$',
'.*@mailtomee.com$',
'.*@maileto.com$',
'.*\.linuxpl.eu$',
'.*@vam2006.eu$',
'.*\.pw$',
'.*\.biz$',
'.*\.host$',
'.*\.info$',
'.*\.surf$',
'.*@sporlo.eu$',
'.*@zoho.eu$',
'.*@shearsplendor.co$',
'.*\.mailedu.de$',
'.*\.online$',
'.*@gmx.com$',
'.*@pezhub.org$',
'.*@zoho.eu$',
'.*@kupony.org$',
'.*@hovercraft-italia.eu$',
'.*@wrapn.net$',
'.*@uhumail.com$',
'.*@awsmail.com$',
'.*@gebaeudereinigungsfirma.com$',
'.*@gartenpflegemuenchen.de$',
'.*@eisfeld-gebaeudereinigung.de$',
'.*@salonyfryzjerskie.info$']
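# e.g. re.match(r'.*\.ru$', '[email protected]') is truthy, so check_new_user()
# below rejects that address with a "banned" flash message.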
logger = logging.getLogger(__name__)
def check_new_user(request, nopass=False):
ret = True
try:
user = SiteUser.create(request.form['username'])
flash("User already exists!")
ret = False
except NoUser:
if check_email(request.form['email']):
flash("You may not create multiple users with the same email address.")
return False
valid = string.ascii_letters + string.digits + ' '
for c in request.form['username']:
if c not in valid:
flash("Invalid character in username: " + c)
ret = False
if not nopass:
pass1 = request.form['password']
pass2 = request.form['password2']
if pass1 != pass2:
flash("The passwords entered don't match.")
ret = False
else:
if len(pass1) < 6:
                flash("Your password is too short; it must be at least 6 characters.")
ret = False
for regex in BANNED:
if re.match(regex, request.form['email']):
flash("This domain has been banned.")
logger.info('Banned email address rejected: {}'.format(request.form['email']))
ret = False
if not re.match("[^@]+@[^@]+\.[^@]+", request.form['email']):
flash("Invalid email address")
ret = False
return ret
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
try:
user = SiteUser.create(request.form['username'])
except NoUser as e:
flash('Login unsuccessful.')
return redirect_back(url_for('index'))
try:
user.authenticate(request.form['password'])
except (NoUser, AuthFail) as e:
            if user.accesslevel == 0:
flash('Your account has been banned')
session.pop('username', None)
else:
flash('Login unsuccessful.')
return redirect_back(url_for('index'))
user.seen()
session['username'] = user.username
session.permanent = True
flash('You were successfully logged in')
if not request.args.get('index'):
return redirect_back(url_for('index'))
else:
return redirect(url_for('index'))
return redirect(url_for('error'))
@app.route('/newuser', methods=['GET', 'POST'])
def newuser():
    pd = PageData()
pd.title = "New User"
if 'username' in session:
flash('You are already logged in.')
return redirect(url_for('index'))
else:
if request.method == 'POST':
if not check_new_user(request):
pd.username = request.form['username']
pd.email = request.form['email']
return render_template('new_user.html', pd=pd)
if not new_user(request.form['username'], request.form['password'], request.form['email'], request.remote_addr):
return render_template('error.html', pd=pd)
try:
user = SiteUser.create(request.form['username'])
user.authenticate(request.form['password'])
session['username'] = user.username
except (NoUser, AuthFail):
return render_template('error.html', pd=pd)
flash('Welcome ' + request.form['username'])
return redirect(url_for('index'))
return render_template('new_user.html', pd=pd)
@app.route('/logout')
def logout():
for key in session.keys():
if 'facebook' not in key:
session.pop(key, None)
flash('You were successfully logged out')
if not request.args.get('index'):
return redirect_back(url_for('index'))
else:
return redirect(url_for('index'))
| gpl-2.0 | -4,444,595,495,403,300,000 | 30.735849 | 124 | 0.525565 | false |
rymurr/cpp-class | pygui/pyscp.py | 1 | 2857 | from sys import stdout
from twisted.python.log import startLogging, err
from twisted.internet import reactor
from twisted.internet.defer import Deferred, succeed
from twisted.conch.ssh.common import NS
from twisted.conch.scripts.cftp import ClientOptions
from twisted.conch.ssh import filetransfer
from twisted.conch.ssh.filetransfer import FileTransferClient
from twisted.conch.client.connect import connect
from twisted.conch.client.default import SSHUserAuthClient, verifyHostKey
from twisted.conch.ssh.connection import SSHConnection
from twisted.conch.ssh.channel import SSHChannel
class ClientUserAuth(SSHUserAuthClient):
def getGenericAnswers(self, name, instruction, prompts):
responses = []
print "I got called too!"
if name:
print name
if instruction:
print instruction
for prompt, echo in prompts:
if echo:
responses.append(raw_input(prompt))
else:
responses.append("jun8ahrd425dec30")
return succeed(responses)
def getPassword(self, prompt = None):
print "I am not being called!"
return succeed("************")
def getPublicKey(self):
print "I am being called!"
return
class SFTPSession(SSHChannel):
name = 'session'
def channelOpen(self, whatever):
d = self.conn.sendRequest(
self, 'subsystem', NS('sftp'), wantReply=True)
d.addCallbacks(self._cbSFTP)
def _cbSFTP(self, result):
client = FileTransferClient()
client.makeConnection(self)
        # hand all further channel data straight to the SFTP protocol,
        # then expose the ready client through the connection's deferred
        self.dataReceived = client.dataReceived
        self.conn._sftp.callback(client)
class SFTPConnection(SSHConnection):
def serviceStarted(self):
self.openChannel(SFTPSession())
def sftp(user, host, port):
options = ClientOptions()
options['host'] = host
options['port'] = port
conn = SFTPConnection()
conn._sftp = Deferred()
auth = ClientUserAuth(user, options, conn)
connect(host, port, options, verifyHostKey, auth)
return conn._sftp
def transfer(client,filename,attrs,filestring):
d = client.openFile(filename, filetransfer.FXF_READ|filetransfer.FXF_WRITE|filetransfer.FXF_CREAT, attrs)
def _fileWrite(openFile):
d = _writeChunk(openFile)
return d
def _writeChunk(openFile):
d = openFile.writeChunk(1,filestring)
print "File sent successfully"
return d
d.addCallback(_fileWrite)
return d
def main():
startLogging(stdout)
user = 'ryanmurray'
host = 'localhost'
port = 22
d = sftp(user, host, port)
d.addCallback(transfer,"test.dat",{"permissions":33261,},"c"*10)
d.addErrback(err, "Problem with SFTP transfer")
d.addCallback(lambda ignored: reactor.stop())
reactor.run()
if __name__ == '__main__':
main()
| gpl-3.0 | 7,092,885,815,837,879,000 | 28.153061 | 109 | 0.669933 | false |
emergebtc/muddery | muddery/typeclasses/players.py | 1 | 3755 | """
This is adapt from evennia/evennia/players/players.py.
The licence of Evennia can be found in evennia/LICENSE.txt.
Player
The Player represents the game "account" and each login has only one
Player object. A Player is what chats on default channels but has no
other in-game-world existence. Rather the Player puppets Objects (such
as Characters) in order to actually participate in the game world.
Guest
Guest players are simple low-level accounts that are created/deleted
on the fly and allows users to test the game without the commitment
of a full registration. Guest accounts are deactivated by default; to
activate them, add the following line to your settings file:
GUEST_ENABLED = True
You will also need to modify the connection screen to reflect the
possibility to connect with a guest account. The setting file accepts
several more options for customizing the Guest account system.
"""
import json
from evennia.utils.utils import make_iter
from evennia.utils import logger
from evennia.players.players import DefaultPlayer, DefaultGuest
class MudderyPlayer(DefaultPlayer):
"""
This class describes the actual OOC player (i.e. the user connecting
to the MUD). It does NOT have visual appearance in the game world (that
is handled by the character which is connected to this). Comm channels
are attended/joined using this object.
It can be useful e.g. for storing configuration options for your game, but
should generally not hold any character-related info (that's best handled
on the character level).
Can be set using BASE_PLAYER_TYPECLASS.
"""
def msg(self, text=None, from_obj=None, sessid=None, **kwargs):
"""
Evennia -> User
This is the main route for sending data back to the user from the
server.
Args:
text (str, optional): data to send
from_obj (Object or Player, optional): object sending. If given,
its at_msg_send() hook will be called.
sessid (int or list, optional): session id or ids to receive this
send. If given, overrules MULTISESSION_MODE.
Notes:
All other keywords are passed on to the protocol.
"""
raw = kwargs.get("raw", False)
if not raw:
try:
text = json.dumps(text)
except Exception, e:
                text = json.dumps({"err": "An error occurred while outputting messages."})
logger.log_errmsg("json.dumps failed: %s" % e)
# set raw=True
if kwargs:
kwargs["raw"] = True
else:
kwargs = {"raw": True}
if from_obj:
# call hook
try:
from_obj.at_msg_send(text=text, to_obj=self, **kwargs)
except Exception:
pass
# session relay
if sessid:
# this could still be an iterable if sessid is an iterable
sessions = self.get_session(sessid)
if sessions:
# this is a special instruction to ignore MULTISESSION_MODE
# and only relay to this given session.
kwargs["_nomulti"] = True
for session in make_iter(sessions):
session.msg(text=text, **kwargs)
return
# we only send to the first of any connected sessions - the sessionhandler
# will disperse this to the other sessions based on MULTISESSION_MODE.
sessions = self.get_all_sessions()
if sessions:
sessions[0].msg(text=text, **kwargs)
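    # Minimal usage sketch (assumed call sites; "player" is a MudderyPlayer):
    #   player.msg({"alert": u"Welcome!"})   # dicts are JSON-encoded before relay
    #   player.msg("raw text", raw=True)     # raw=True skips json.dumps entirely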
class MudderyGuest(DefaultGuest):
"""
This class is used for guest logins. Unlike Players, Guests and their
characters are deleted after disconnection.
"""
pass
| bsd-3-clause | -812,016,742,235,505,500 | 34.424528 | 98 | 0.645806 | false |
PwnArt1st/searx | searx/preferences.py | 1 | 11106 | from searx import settings, autocomplete
from searx.languages import language_codes as languages
COOKIE_MAX_AGE = 60 * 60 * 24 * 365 * 5 # 5 years
LANGUAGE_CODES = [l[0] for l in languages]
LANGUAGE_CODES.append('all')
DISABLED = 0
ENABLED = 1
class MissingArgumentException(Exception):
pass
class ValidationException(Exception):
pass
class Setting(object):
"""Base class of user settings"""
def __init__(self, default_value, **kwargs):
super(Setting, self).__init__()
self.value = default_value
for key, value in kwargs.iteritems():
setattr(self, key, value)
self._post_init()
def _post_init(self):
pass
def parse(self, data):
self.value = data
def get_value(self):
return self.value
def save(self, name, resp):
resp.set_cookie(name, bytes(self.value), max_age=COOKIE_MAX_AGE)
class StringSetting(Setting):
"""Setting of plain string values"""
pass
class EnumStringSetting(Setting):
"""Setting of a value which can only come from the given choices"""
def _validate_selection(self, data):
if data != '' and data not in self.choices:
raise ValidationException('Invalid default value: {0}'.format(data))
def _post_init(self):
if not hasattr(self, 'choices'):
raise MissingArgumentException('Missing argument: choices')
self._validate_selection(self.value)
def parse(self, data):
self._validate_selection(data)
self.value = data
class MultipleChoiceSetting(EnumStringSetting):
"""Setting of values which can only come from the given choices"""
def _validate_selections(self, items):
for item in items:
if item not in self.choices:
raise ValidationException('Invalid choice: {0}'.format(self.value))
def _post_init(self):
if not hasattr(self, 'choices'):
raise MissingArgumentException('Missing argument: choices')
self._validate_selections(self.value)
def parse(self, data):
if data == '':
self.value = []
return
elements = data.split(',')
self._validate_selections(elements)
self.value = elements
def parse_form(self, data):
self.value = []
for choice in data:
if choice in self.choices and choice not in self.value:
self.value.append(choice)
def save(self, name, resp):
resp.set_cookie(name, ','.join(self.value), max_age=COOKIE_MAX_AGE)
class MapSetting(Setting):
"""Setting of a value that has to be translated in order to be storable"""
def _validate_value(self, data):
if data not in self.map.values():
raise ValidationException('Invalid value: {0}'.format(data))
def _validate_key(self, key):
if key not in self.map:
raise ValidationException('Invalid key: {0}'.format(key))
def _post_init(self):
if not hasattr(self, 'map'):
raise MissingArgumentException('missing argument: map')
self._validate_value(self.value)
def parse(self, data):
self._validate_key(data)
self.value = self.map[data]
self.key = data
def save(self, name, resp):
resp.set_cookie(name, bytes(self.key), max_age=COOKIE_MAX_AGE)
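# Minimal sketch of the Setting API (hypothetical values, not searx defaults):
#   s = MapSetting(False, map={'0': False, '1': True})
#   s.parse('1')                  # validates the key, stores the mapped value
#   s.get_value()                 # -> True
#   s.save('image_proxy', resp)   # writes the original key back as a cookie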
class SwitchableSetting(Setting):
    """ Base class for settings that can be turned on and off"""
def _post_init(self):
self.disabled = set()
self.enabled = set()
if not hasattr(self, 'choices'):
raise MissingArgumentException('missing argument: choices')
def transform_form_items(self, items):
return items
def transform_values(self, values):
return values
def parse_cookie(self, data):
if data[DISABLED] != '':
self.disabled = set(data[DISABLED].split(','))
if data[ENABLED] != '':
self.enabled = set(data[ENABLED].split(','))
def parse_form(self, items):
items = self.transform_form_items(items)
self.disabled = set()
self.enabled = set()
for choice in self.choices:
if choice['default_on']:
if choice['id'] in items:
self.disabled.add(choice['id'])
else:
if choice['id'] not in items:
self.enabled.add(choice['id'])
def save(self, resp):
resp.set_cookie('disabled_{0}'.format(self.value), ','.join(self.disabled), max_age=COOKIE_MAX_AGE)
resp.set_cookie('enabled_{0}'.format(self.value), ','.join(self.enabled), max_age=COOKIE_MAX_AGE)
def get_disabled(self):
disabled = self.disabled
for choice in self.choices:
if not choice['default_on'] and choice['id'] not in self.enabled:
disabled.add(choice['id'])
return self.transform_values(disabled)
def get_enabled(self):
enabled = self.enabled
for choice in self.choices:
if choice['default_on'] and choice['id'] not in self.disabled:
enabled.add(choice['id'])
return self.transform_values(enabled)
class EnginesSetting(SwitchableSetting):
def _post_init(self):
super(EnginesSetting, self)._post_init()
transformed_choices = []
for engine_name, engine in self.choices.iteritems():
for category in engine.categories:
transformed_choice = dict()
transformed_choice['default_on'] = not engine.disabled
transformed_choice['id'] = '{}__{}'.format(engine_name, category)
transformed_choices.append(transformed_choice)
self.choices = transformed_choices
def transform_form_items(self, items):
return [item[len('engine_'):].replace('_', ' ').replace(' ', '__') for item in items]
def transform_values(self, values):
if len(values) == 1 and next(iter(values)) == '':
return list()
transformed_values = []
for value in values:
engine, category = value.split('__')
transformed_values.append((engine, category))
return transformed_values
class PluginsSetting(SwitchableSetting):
def _post_init(self):
super(PluginsSetting, self)._post_init()
transformed_choices = []
for plugin in self.choices:
transformed_choice = dict()
transformed_choice['default_on'] = plugin.default_on
transformed_choice['id'] = plugin.id
transformed_choices.append(transformed_choice)
self.choices = transformed_choices
def transform_form_items(self, items):
return [item[len('plugin_'):] for item in items]
class Preferences(object):
"""Stores, validates and saves preferences to cookies"""
def __init__(self, themes, categories, engines, plugins):
super(Preferences, self).__init__()
self.key_value_settings = {'categories': MultipleChoiceSetting(['general'], choices=categories),
'language': EnumStringSetting(settings['search']['language'],
choices=LANGUAGE_CODES),
'locale': EnumStringSetting(settings['ui']['default_locale'],
choices=settings['locales'].keys() + ['']),
'autocomplete': EnumStringSetting(settings['search']['autocomplete'],
choices=autocomplete.backends.keys() + ['']),
'image_proxy': MapSetting(settings['server']['image_proxy'],
map={'': settings['server']['image_proxy'],
'0': False,
'1': True}),
'method': EnumStringSetting('POST', choices=('GET', 'POST')),
'safesearch': MapSetting(settings['search']['safe_search'], map={'0': 0,
'1': 1,
'2': 2}),
'theme': EnumStringSetting(settings['ui']['default_theme'], choices=themes),
'results_on_new_tab': MapSetting(False, map={'0': False, '1': True})}
self.engines = EnginesSetting('engines', choices=engines)
self.plugins = PluginsSetting('plugins', choices=plugins)
self.unknown_params = {}
def parse_cookies(self, input_data):
for user_setting_name, user_setting in input_data.iteritems():
if user_setting_name in self.key_value_settings:
self.key_value_settings[user_setting_name].parse(user_setting)
elif user_setting_name == 'disabled_engines':
self.engines.parse_cookie((input_data.get('disabled_engines', ''),
input_data.get('enabled_engines', '')))
elif user_setting_name == 'disabled_plugins':
self.plugins.parse_cookie((input_data.get('disabled_plugins', ''),
input_data.get('enabled_plugins', '')))
def parse_form(self, input_data):
disabled_engines = []
enabled_categories = []
disabled_plugins = []
for user_setting_name, user_setting in input_data.iteritems():
if user_setting_name in self.key_value_settings:
self.key_value_settings[user_setting_name].parse(user_setting)
elif user_setting_name.startswith('engine_'):
disabled_engines.append(user_setting_name)
elif user_setting_name.startswith('category_'):
enabled_categories.append(user_setting_name[len('category_'):])
elif user_setting_name.startswith('plugin_'):
disabled_plugins.append(user_setting_name)
else:
self.unknown_params[user_setting_name] = user_setting
self.key_value_settings['categories'].parse_form(enabled_categories)
self.engines.parse_form(disabled_engines)
self.plugins.parse_form(disabled_plugins)
# cannot be used in case of engines or plugins
def get_value(self, user_setting_name):
if user_setting_name in self.key_value_settings:
return self.key_value_settings[user_setting_name].get_value()
def save(self, resp):
for user_setting_name, user_setting in self.key_value_settings.iteritems():
user_setting.save(user_setting_name, resp)
self.engines.save(resp)
self.plugins.save(resp)
for k, v in self.unknown_params.items():
resp.set_cookie(k, v, max_age=COOKIE_MAX_AGE)
return resp
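# End-to-end sketch (hypothetical arguments; the webapp passes its real
# themes/categories/engines/plugins, and themes must include the configured
# default theme for validation to pass):
#   prefs = Preferences(themes=('oscar',), categories=['general'], engines={}, plugins=[])
#   prefs.parse_form({'method': u'GET', 'category_general': u'on'})
#   prefs.get_value('method')   # -> 'GET'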
| agpl-3.0 | 6,231,365,305,246,020,000 | 37.5625 | 114 | 0.56591 | false |
googleapis/python-aiplatform | samples/snippets/predict_text_sentiment_analysis_sample_test.py | 1 | 1113 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import predict_text_sentiment_analysis_sample
ENDPOINT_ID = "7811563922418302976" # sentiment analysis endpoint
PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
content = "The Chicago Bears is a great football team!"
def test_ucaip_generated_predict_text_sentiment_analysis_sample(capsys):
predict_text_sentiment_analysis_sample.predict_text_sentiment_analysis_sample(
content=content, project=PROJECT_ID, endpoint_id=ENDPOINT_ID
)
out, _ = capsys.readouterr()
assert "sentiment" in out
| apache-2.0 | -5,785,958,710,520,507,000 | 32.727273 | 82 | 0.758311 | false |
RuiNascimento/krepo | script.module.lambdascrapers/lib/lambdascrapers/sources_placenta/en_placenta-1.7.8/to_be_fixed/needsfixing/movie25.py | 1 | 6150 | # -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @Daddy_Blamo wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import re,urllib,urlparse,json,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
from resources.lib.modules import source_utils
class source:
def __init__(self):
self.priority = 0
self.language = ['en']
self.domains = ['movie25.ph', 'movie25.hk', 'tinklepad.is', 'tinklepad.ag', 'movie25.ag','5movies.to','movie25.unblocked.tv']
self.base_link = 'http://5movies.to'
self.search_link = '/search.php?q=%s'
self.search_link_2 = 'https://www.googleapis.com/customsearch/v1element?key=AIzaSyCVAXiUzRYsML1Pv6RwSG1gunmMikTzQqY&rsz=filtered_cse&num=10&hl=en&cx=008492768096183390003:0ugusjabnlq&googlehost=www.google.com&q=%s'
self.video_link = '/getlink.php?Action=get&lk=%s'
def matchAlias(self, title, aliases):
try:
for alias in aliases:
if cleantitle.get(title) == cleantitle.get(alias['title']):
return True
except:
return False
def movie(self, imdb, title, localtitle, aliases, year):
try:
aliases.append({'country': 'us', 'title': title})
url = {'imdb': imdb, 'title': title, 'year': year, 'aliases': aliases}
url = urllib.urlencode(url)
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
aliases.append({'country': 'us', 'title': tvshowtitle})
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'aliases': aliases}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
            if url is None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
return
def _search(self, title, year, aliases, headers):
try:
q = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(cleantitle.getsearch(title)))
r = client.request(q)
r = client.parseDOM(r, 'div', attrs={'class':'ml-img'})
r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'img', ret='alt'))
url = [i for i in r if cleantitle.get(title) == cleantitle.get(i[1]) and year in i[1]][0][0]
return url
except:
pass
def sources(self, url, hostDict, hostprDict):
try:
sources = []
            if url is None: return sources
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
aliases = eval(data['aliases'])
headers = {}
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
year = data['year']
if 'tvshowtitle' in data:
episode = data['episode']
season = data['season']
url = self._search(data['tvshowtitle'], data['year'], aliases, headers)
url = url.replace('online-free','season-%s-episode-%s-online-free'%(season,episode))
else:
episode = None
year = data['year']
url = self._search(data['title'], data['year'], aliases, headers)
url = url if 'http' in url else urlparse.urljoin(self.base_link, url)
            result = client.request(url)
result = client.parseDOM(result, 'li', attrs={'class':'link-button'})
links = client.parseDOM(result, 'a', ret='href')
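            # probe at most 10 scraped links; each is passed through the
            # site's getlink.php endpoint (self.video_link) to resolve the
            # real hoster URL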
i = 0
for l in links:
if i == 10: break
try:
l = l.split('=')[1]
l = urlparse.urljoin(self.base_link, self.video_link%l)
result = client.request(l, post={}, headers={'Referer':url})
u = result if 'http' in result else 'http:'+result
if 'google' in u:
valid, hoster = source_utils.is_host_valid(u, hostDict)
urls, host, direct = source_utils.check_directstreams(u, hoster)
for x in urls: sources.append({'source': host, 'quality': x['quality'], 'language': 'en', 'url': x['url'], 'direct': direct, 'debridonly': False})
else:
valid, hoster = source_utils.is_host_valid(u, hostDict)
if not valid: continue
try:
u.decode('utf-8')
sources.append({'source': hoster, 'quality': 'SD', 'language': 'en', 'url': u, 'direct': False, 'debridonly': False})
i+=1
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
if 'google' in url:
return directstream.googlepass(url)
else:
return url
| gpl-2.0 | -2,381,739,724,943,146,500 | 41.123288 | 222 | 0.504228 | false |
alejob/mdanalysis | testsuite/MDAnalysisTests/plugins/memleak.py | 1 | 3529 | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""Test plugin that reports memleaks on a test-by-test basis.
The plugin works by clearing a test's namespace after it has run, and then
forcing a garbage collection round and checking for uncollectable objects.
Implementation uses the beforeTest hook to register our memory leak check
as a cleanup to the test.
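Usage (illustrative; assumes nose's default --with-<name> flag naming)::
    nosetests --with-memleak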
"""
import gc
import nose
from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
_leakedobjs = set() # We must keep track of seen leaks to avoid raising multiple
# errors for the same ones.
def memleak_check(test, pretest_attrs):
"""Memory leak check, to be registered as a :class:`unittest.TestCase` cleanup method.
Registration can be done using :meth:`unittest.TestCase.addCleanup`. The
test itself and its `__dict__.keys()` must be registered as arguments.
This function works by clearing a test's namespace after it has run, and
then forcing a garbage collection round and checking for uncollectable
objects.
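    Illustrative manual registration from inside a TestCase (the Memleak
    plugin below does the equivalent in its beforeTest hook)::
        self.addCleanup(memleak_check, self, list(self.__dict__.keys()))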
"""
attrs = []
for attrname in pretest_attrs:
try:
attrs.append((attrname, getattr(test, attrname)))
except AttributeError:
pass
test.__dict__.clear()
gc.collect()
latest_leaks = [ obj for obj in gc.garbage if obj not in _leakedobjs ]
_leakedobjs.update(gc.garbage)
# Restore the pre-test stuff
for name, val in attrs:
setattr(test, name, val)
if latest_leaks:
raise MemleakError("GC failed to collect the following: {0}".format(latest_leaks))
class MemleakError(Exception):
"""Exception raised internally to mark the test as memleaking."""
pass
class Memleak(ErrorClassPlugin):
"""Report memleaks on a test-by-test basis."""
name = "memleak"
enabled = False
memleak = ErrorClass(MemleakError,
label='MEMLEAK',
isfailure=True)
def configure(self, options, conf):
super(Memleak, self).configure(options, conf)
self.config = conf # This will let other tests know about config settings.
def beforeTest(self, test):
# We register our check function and record with it the names that
# the test has at birth so that later we only wipe stuff created
# during testing.
# We really don't want a smart dict_keys object here, as it'd be
# changed during testing.
test.test.addCleanup(memleak_check, test.test, list(test.test.__dict__.keys()))
plugin_class = Memleak
| gpl-2.0 | -833,070,304,503,999,500 | 38.211111 | 90 | 0.688014 | false |
nukeador/20up | 20up.py | 1 | 15586 | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright (C) 2013 Borja Menendez Moreno
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Authors: Borja Menéndez Moreno <[email protected]>
Program for the backup of Tuenti, a Spanish social network.
This program downloads all of the photos, comments, private messages and
friends' information of a specific user.
"""
import math, os, httplib, requests, string
import re, getpass, urllib2, hashlib, unicodedata
from time import sleep
from APtuentI import APtuentI
from MyHTMLParser import MyHTMLParser
version = '1.1'
web = 'http://bmenendez.github.io/20up'
twitter = '@borjamonserrano'
email = '[email protected]'
appkey = 'MDI3MDFmZjU4MGExNWM0YmEyYjA5MzRkODlmMjg0MTU6MC4xMzk0ODYwMCAxMjYxMDYwNjk2'
statusText = 'Utilizando 20p para descargarme todas '
statusText += 'mis fotos de Tuenti :) ' + web
def printWelcome():
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
print '| 20up version ' + version
print '|'
print '| Gracias por descargar esta aplicacion'
print '| Espero que te sea de gran utilidad :)'
print '| Si tienes alguna duda, tienes toda la info en:'
print '|'
print '| ' + web
print '|'
print '| Tambien puedes resolver tus dudas en twitter: ' + twitter
print '| Asi como por e-mail a traves de: ' + email
print '-' * 60
def printGoodBye():
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
print '| 20up version ' + version
print '| Gracias por haber utilizado 20up ' + version
print '| Espero que te haya sido de gran utilidad :)'
print '| Si tienes alguna duda, tienes toda la info en:'
print '|'
print '| ' + web
print '|'
print '| Tambien puedes resolver tus dudas en twitter: ' + twitter
print '| Asi como por e-mail a traves de: ' + email
print '|'
print '| Si quieres, puedo cambiar tu estado Tuenti para advertir que'
print '| has utilizado 20up y que tus contactos conozcan la aplicacion.'
print '| El mensaje que se pondra sera: ' + statusText
print '| 1 - Si'
print '| Otro - No'
print '-' * 60
def printMenu():
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
print '| 20up version ' + version
print '|'
print '| Pulsa un numero en funcion de lo que quieras hacer:'
print '| 1 - Backup total (fotos, privados, comentarios y usuarios)'
print '| 2 - Backup de fotos'
print '| 3 - Backup de privados'
print '| 4 - Backup de comentarios'
print '| 5 - Backup de usuarios'
print '| 6 - Ayuda'
print '| 7 - Salir'
print '-' * 60
def printStarting(text):
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
print '| 20up version ' + version
print '|'
print '| Comenzando el backup de ' + text + '...'
print '-' * 60
def printEnding(text):
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
print '| 20up version ' + version
print '|'
print '| Terminado el backup de ' + text + '...'
raw_input('| Pulsa ENTER para continuar')
print '-' * 60
def printHelp():
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
print '| 20up version ' + version
print '|'
print '| 20up es una aplicacion para hacer un backup de tu Tuenti.'
print '| 20up no se responsabiliza de los usos derivados que se le'
print '| puedan dar a esta aplicacion.'
print '| 20up tiene como proposito poder realizar un backup de tu'
print '| cuenta de usuario de Tuenti, de forma que tendras todas tus'
print '| fotos, mensajes privados, comentarios de tablon y datos de tus'
print '| contactos en tu ordenador.'
print '| 20up no almacena ni envia tu correo o contrasenya a terceras'
print '| personas o cuentas de Tuenti.'
print '| Por favor, si tienes alguna duda, visita la web:'
print '|'
print '| ' + web
print '|'
print '| Tambien puedes resolver tus dudas en twitter: ' + twitter
print '| Asi como por e-mail a traves de: ' + email
print '-' * 60
def getData(withError):
os.system('cls' if os.name == 'nt' else 'clear')
print '-' * 60
if withError:
print '| Parece que no has introducido bien tus datos'
print '| Por favor, escribe de nuevo...'
else:
print '| Para poder hacer el backup necesito un poco mas'
print '| de informacion sobre tu cuenta de Tuenti...'
print '|'
print '| Esta informacion no se almacenara en ningun sitio'
print '| ni se enviara a ningun lado, solamente se requiere'
print '| para la conexion con tu cuenta de Tuenti :)'
print
email = raw_input('E-mail: ')
while not re.match(r'[^@]+@[^@]+\.[^@]+', email):
email = raw_input('El e-mail no es valido, intenta de nuevo: ')
password = getpass.getpass()
print '-' * 60
return email, password
def backupTotal(myTuenti, email, password):
backupPrivateMessages(myTuenti, email, password)
backupComments(myTuenti)
backupUsers(myTuenti)
backupPhotos(myTuenti)
def backupPhotos(myTuenti):
printStarting('fotos')
print '| Obteniendo los nombres de tus albumes...'
dicPhotos = {}
i = 0
totalPhotos = 0
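  # albums are returned in pages of 20; keep paging until a short page
  # signals that there are no more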
while True:
albums = myTuenti.getUserAlbums(i)
counter = 0
for album in albums[0]:
counter += 1
dicPhotos[album] = [albums[0][album]['name'], albums[0][album]['size']]
totalPhotos += int(albums[0][album]['size'])
if counter < 20:
break
sleep(0.5)
i += 1
print '| Nombre de los albumes obtenido'
rootPath = os.getcwdu()
theJoinPath = os.path.join(rootPath, 'fotos')
if not os.path.exists(theJoinPath):
print '| Creando directorio donde se alojaran las fotos...'
os.makedirs(theJoinPath)
print '| Directorio creado'
s = requests.Session()
totalCounter = 0
for album in dicPhotos:
albumName = unicodedata.normalize('NFKD', dicPhotos[album][0])
albumName = re.sub('[^a-zA-Z0-9\n\.]', '-', albumName)
size = dicPhotos[album][1]
albumPath = os.path.join(theJoinPath, albumName)
if not os.path.exists(albumPath):
print '| Creando directorio donde se alojaran las fotos'
print '| del album ' + albumName + '...'
os.makedirs(albumPath)
print '| Directorio creado'
os.chdir(albumPath)
myCounter = int(size)
maxFill = len(str(size))
iters = myCounter / 20.0
if math.fmod(iters, 1) != 0.0:
iters += 1
iters = int(iters)
totalPhotosAlbum = myCounter
print '|'
print '| Descargando fotos del album ' + albumName + '...'
print '|'
partialCounter = 0
for i in range(0, iters):
mf = myTuenti.getAlbumPhotos(album, i)
for elem in mf[0]['album']:
url = elem['photo_url_600']
title = unicodedata.normalize('NFKD', elem['title'])
title = re.sub('[^a-zA-Z0-9\n\.]', '-', title)
partialCounter += 1
totalCounter += 1
fileName = string.zfill(myCounter, maxFill) + '_' + title + '.jpg'
if not os.path.exists(fileName):
partialPerc = 100 * partialCounter / totalPhotosAlbum
totalPerc = 100 * totalCounter / totalPhotos
percs = '[' + str(totalPerc) + '% total] ['
percs += str(partialPerc) + '% album] '
print '| ' + percs + 'Descargando foto ' + title + '...'
while not os.path.exists(fileName):
with open(fileName, 'wb') as handle:
r = s.get(url, verify=False)
for block in r.iter_content(1024):
if not block:
break
handle.write(block)
sleep(0.5)
myCounter -= 1
os.chdir(theJoinPath)
os.chdir(rootPath)
def backupPrivateMessages(myTuenti, email, password):
printStarting('mensajes privados')
print '| Obteniendo identificadores de tus mensajes privados'
print '| (esto llevara algun tiempo)'
messages = myTuenti.getInbox(0)
totalMessages = int(messages[0]['num_threads'])
keys = []
maxFill = len(str(totalMessages))
iters = totalMessages / 10.0
if math.fmod(iters, 1) != 0.0:
iters += 1
iters = int(iters)
for i in range(0, iters):
messages = myTuenti.getInbox(i)
for message in messages[0]['threads']:
keys.append(message['key'])
sleep(0.5)
s = requests.Session()
r = s.get('https://m.tuenti.com/?m=Login', verify=False)
csrf = re.findall('name="csrf" value="(.*?)"', r.text)[0]
data = { 'csrf': csrf, 'tuentiemailaddress': email, 'password': password, 'remember': 1 }
s.post('https://m.tuenti.com/?m=Login&f=process_login', data)
r = s.get("https://m.tuenti.com/?m=Profile&func=my_profile", verify=False)
if r.text.find('email') != -1:
print '| E-mail o password incorrectos'
raw_input('| Pulsa ENTER para continuar')
return
rootPath = os.getcwdu()
theJoinPath = os.path.join(rootPath, 'privados')
if not os.path.exists(theJoinPath):
print '| Creando directorio donde se alojaran los mensajes privados...'
os.makedirs(theJoinPath)
print '| Directorio creado'
os.chdir(theJoinPath)
counter = 0
parser = MyHTMLParser()
for key in keys:
counter += 1
percent = 100 * counter / totalMessages
print '| [' + str(percent) + '%] Descargando mensaje ' + \
str(counter) + ' de ' + str(totalMessages) + '...'
urlName = 'https://m.tuenti.com/?m=messaging&func=view_thread&thread_id='
urlName += key + '&box=inbox&view_full=1'
r = s.get(urlName, verify=False)
sleep(0.5)
parser.setFile(string.zfill(counter, maxFill))
parser.feed(r.text)
os.chdir(rootPath)
def backupComments(myTuenti):
printStarting('comentarios')
i = 0
counter = 0
totalCount = 0
fileToWrite = open('comentarios.txt', 'w')
while True:
mes = myTuenti.getWall(i)
counter = 0
try:
for post in mes[0]['posts']:
totalCount += 1
print '| Descargando comentario ' + str(totalCount) + '...'
text = '*' * 60
text += '\r\n'
counter += 1
for anElem in post['body']:
text += post['author']['name'] + ' '
text += post['author']['surname'] + ': '
text += anElem['plain']
text += '\r\n'
try:
if post['parent']['body']:
text += '-' * 20
text += '\r\n'
for elem in post['parent']['body']:
text += elem['plain']
text += '\r\n'
counter += 1
except:
pass
fileToWrite.write(text.encode('utf-8'))
if counter == 0:
        break
sleep(0.5)
i += 1
except:
break
fileToWrite.close()
def backupUsers(myTuenti):
printStarting('usuarios')
print '| Obteniendo todos tus contactos'
totalFriends = myTuenti.getFriendsData()
fileToWrite = open('usuarios.txt', 'w')
text = ''
for friend in totalFriends[0]['friends']:
name = friend['name']
surname = friend['surname']
text += name + ' ' + surname
print '| Obteniendo datos de ' + name + ' ' + surname + '...'
friendId = friend['id']
data = myTuenti.getUsersData(friendId)
if data[0]['users'][0]['birthday']:
text += ' (' + data[0]['users'][0]['birthday'] + ')'
if data[0]['users'][0]['phone_number']:
text += ': ' + data[0]['users'][0]['phone_number']
text += '\r\n'
sleep(0.5)
fileToWrite.write(text.encode('utf-8'))
fileToWrite.close()
def main():
email, password = getData(False)
myTuenti = APtuentI()
while True:
try:
login = myTuenti.doLogin()
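      # challenge-response login: Tuenti expects md5(challenge + md5(password))
      # as the passcode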
passcode = hashlib.md5(login[0]['challenge'] + \
hashlib.md5(password).hexdigest()).hexdigest()
out = myTuenti.getSession(login[0]['timestamp'], \
login[0]['seed'], passcode, appkey, email)
myTuenti.setSessionID(out[0]['session_id'])
break
except:
email, password = getData(True)
respuesta = '0'
while respuesta != '7':
printMenu()
respuesta = raw_input('> ')
if respuesta == '1':
backupTotal(myTuenti, email, password)
printEnding('todo')
elif respuesta == '2':
backupPhotos(myTuenti)
printEnding('fotos')
elif respuesta == '3':
backupPrivateMessages(myTuenti, email, password)
printEnding('mensajes privados')
elif respuesta == '4':
backupComments(myTuenti)
printEnding('comentarios')
elif respuesta == '5':
backupUsers(myTuenti)
printEnding('usuarios')
elif respuesta == '6':
printHelp()
raw_input('> Presiona ENTER para continuar')
elif respuesta == '7':
pass
else:
print 'No has elegido una opcion valida'
printGoodBye()
respuesta = raw_input('> ')
if respuesta == '1':
myTuenti.setUserStatus(statusText)
print '| Hasta pronto :)'
if __name__ == '__main__':
printWelcome()
raw_input('| Pulsa ENTER para continuar')
while True:
try:
main()
break
except urllib2.URLError:
print '|'
print '| No hay conexion a internet'
print '|'
break
except KeyboardInterrupt:
print
print '|'
print '| Cerrando aplicacion...'
print '|'
break
except Exception, e:
print '|'
print '| Ha ocurrido un error inesperado:', e
print '|'
raw_input('| Pulsa ENTER para continuar')
| gpl-3.0 | -1,007,471,043,088,747,600 | 33.028384 | 93 | 0.548091 | false |
angr/angr | angr/state_plugins/heap/heap_base.py | 1 | 6439 | from ..plugin import SimStatePlugin
from ...errors import SimMemoryError
from .. import sim_options as opts
import logging
l = logging.getLogger("angr.state_plugins.heap.heap_base")
# TODO: derive heap location from SimOS and binary info for something more realistic (and safe?)
DEFAULT_HEAP_LOCATION = 0xc0000000
DEFAULT_HEAP_SIZE = 64 * 4096
class SimHeapBase(SimStatePlugin):
"""
This is the base heap class that all heap implementations should subclass. It defines a few handlers for common
heap functions (the libc memory management functions). Heap implementations are expected to override these
functions regardless of whether they implement the SimHeapLibc interface. For an example, see the SimHeapBrk
implementation, which is based on the original libc SimProcedure implementations.
:ivar heap_base: the address of the base of the heap in memory
:ivar heap_size: the total size of the main memory region managed by the heap in memory
:ivar mmap_base: the address of the region from which large mmap allocations will be made
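    A minimal subclass sketch (illustrative only, not a real angr heap)::
        class MyHeap(SimHeapBase):
            def _malloc(self, sim_size):
                size = self._conc_alloc_size(sim_size)
                # ...carve `size` bytes out of [heap_base, heap_base + heap_size)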
"""
def __init__(self, heap_base=None, heap_size=None):
super().__init__()
self.heap_base = heap_base if heap_base is not None else DEFAULT_HEAP_LOCATION
self.heap_size = heap_size if heap_size is not None else DEFAULT_HEAP_SIZE
self.mmap_base = self.heap_base + self.heap_size * 2
def copy(self, memo):
o = super().copy(memo)
o.heap_base = self.heap_base
o.heap_size = self.heap_size
o.mmap_base = self.mmap_base
return o
def _conc_alloc_size(self, sim_size):
"""
Concretizes a size argument, if necessary, to something that makes sense when allocating space. Here we just
maximize its potential size up to the maximum variable size specified in the libc plugin.
TODO:
Further consideration of the tradeoffs of this approach is probably warranted. SimHeapPTMalloc especially makes
a lot of different concretization strategy assumptions, but this function handles one of the more important
problems that any heap implementation will face: how to decide the amount of space to allocate upon request for
a symbolic size. Either we do as we do here and silently constrain the amount returned to a default max value,
or we could add a path constraint to the state to prevent exploration of any paths that would have legitimately
occurred given a larger allocation size.
The first approach (the silent maximum) has its benefit in that the explored state space will not be
constrained. Sometimes this could work out, as when an allocation is returned that is smaller than requested but
which the program doesn't end up making full use of anyways. Alternatively, this lack of fidelity could cause
the program to overwrite other allocations made, since it should be able to assume the allocation is as large as
it requested it be.
The second approach (the path constraint) has its benefit in that no paths will be explored that *could* fail
when an allocation is made too small. On the other hand, as stated above, some of these paths might not have
failed anyways, and doing this causes us to lose the opportunity to explore those paths.
Perhaps these behaviors could be parameterized in the future?
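        Illustrative behaviour (hypothetical numbers): if sim_size is symbolic
        and solver.max_int(sim_size) returns 0x10000 while
        state.libc.max_variable_size is 0x2000, the size returned below is
        silently clamped to 0x2000 and a warning is logged.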
"""
if self.state.solver.symbolic(sim_size):
size = self.state.solver.max_int(sim_size)
if size > self.state.libc.max_variable_size:
l.warning("Allocation request of %d bytes exceeded maximum of %d bytes; allocating %d bytes",
size, self.state.libc.max_variable_size, self.state.libc.max_variable_size)
size = self.state.libc.max_variable_size
else:
size = self.state.solver.eval(sim_size)
return size
def _malloc(self, sim_size):
"""
Handler for any libc `malloc` SimProcedure call. If the heap has faithful support for `malloc`, it ought to be
implemented in a `malloc` function (as opposed to the `_malloc` function).
:param sim_size: the amount of memory (in bytes) to be allocated
"""
raise NotImplementedError("%s not implemented for %s" % (self._malloc.__func__.__name__,
self.__class__.__name__))
def _free(self, ptr):
"""
Handler for any libc `free` SimProcedure call. If the heap has faithful support for `free`, it ought to be
implemented in a `free` function (as opposed to the `_free` function).
:param ptr: the location in memory to be freed
"""
raise NotImplementedError("%s not implemented for %s" % (self._free.__func__.__name__,
self.__class__.__name__))
def _calloc(self, sim_nmemb, sim_size):
"""
Handler for any libc `calloc` SimProcedure call. If the heap has faithful support for `calloc`, it ought to be
implemented in a `calloc` function (as opposed to the `_calloc` function).
        :param sim_nmemb: the number of elements to be allocated
:param sim_size: the size of each element (in bytes)
"""
raise NotImplementedError("%s not implemented for %s" % (self._calloc.__func__.__name__,
self.__class__.__name__))
def _realloc(self, ptr, size):
"""
Handler for any libc `realloc` SimProcedure call. If the heap has faithful support for `realloc`, it ought to be
implemented in a `realloc` function (as opposed to the `_realloc` function).
:param ptr: the location in memory to be reallocated
:param size: the new size desired for the allocation
"""
raise NotImplementedError("%s not implemented for %s" % (self._realloc.__func__.__name__,
self.__class__.__name__))
def init_state(self):
if opts.ABSTRACT_MEMORY in self.state.options:
return
try:
self.state.memory.permissions(self.heap_base)
except SimMemoryError:
l.debug("Mapping base heap region")
self.state.memory.map_region(self.heap_base, self.heap_size, 3)
| bsd-2-clause | -494,883,889,625,038,900 | 50.512 | 120 | 0.649635 | false |
OSU-CS419/viewmydb | viewmydb/DBcreatetable.py | 1 | 9454 | #!/usr/bin/python
import urwid
import mainview
"""
NOTES
-----
This module builds the widget to allow the user to create a table in a database.
"""
class CreateTableInfo:
def __init__(self):
self.count = None
self.table_name = ""
self.table_fields = None
self.query_string = ""
self.atr_name = ""
self.atr_type = ""
self.atr_null = False
self.atr_primarykey = False
self.atr_unique = False
self.atr_none = True
def show_db_createtable(frame, body, main_body, user_info):
#used to easily insert a blank line widget
blank = urwid.Divider()
#create class instance to hold table creation input
table_info = CreateTableInfo()
#signal handler for the create button
def create_btn_press(button):
#store number of fields in table_info
table_info.table_fields = fields_num.value()
#check for errors
if table_info.table_fields == 0 or table_info.table_name == "":
text_error.original_widget = urwid.AttrWrap( urwid.Text(u"Enter in both a name and number of fields."), 'error')
elif table_info.table_fields > 20:
text_error.original_widget = urwid.AttrWrap( urwid.Text(u"The max number of fields is 20"), 'error')
else:
#user input was correct, go to next view
second_createtable(frame, body, main_body, user_info, table_info)
#signal handler for text input, stores input information from user
def edit_change_event(self, text):
table_info.table_name = text
#variables to hold text to show user for login view
text_error = urwid.AttrWrap( urwid.Text(u""), 'body')
text_1 = urwid.Text(u"Create a table below:")
text_2 = urwid.Text(u"(the number of fields must be less than 20...)")
#setting up the edit input widgets for database name and password
table_name_edit = urwid.Edit(u"Table Name: ", "")
urwid.connect_signal(table_name_edit, 'change', edit_change_event)
table_name_edit = urwid.AttrWrap(table_name_edit, 'main_sel', 'main_self')
fields_num = urwid.IntEdit(u"Number of Table Fields: ")
table_fields_edit = urwid.AttrWrap(fields_num, 'main_sel', 'main_self')
#create button
table_nextstep_btn = urwid.AttrWrap( urwid.Button(u"Next Step", create_btn_press), 'btnf', 'btn')
#This is the pile widget that holds all of the main body widgets
create_table = urwid.WidgetPlaceholder( urwid.Padding(
urwid.Pile([
text_error,
blank,
text_1,
blank,
urwid.Padding( urwid.Pile([table_name_edit, table_fields_edit]), left=5, width=45),
blank,
text_2,
blank,
urwid.Padding(table_nextstep_btn, left=5, width=13)
]), left=5, right=5))
return create_table
def second_createtable(frame, body, main_body, user_info, table_info):
blank = urwid.Divider()
table_info.count = 0 #holds count on what attribute is being edited
text_1 = urwid.Text([u"Creating Table: ", table_info.table_name])
edit_num = urwid.Text([u"Now editing attribute number: ", str(table_info.count + 1), ' / ', str(table_info.table_fields)])
#clear out query string first
table_info.query_string = ""
#start creating the query string
table_info.query_string += 'CREATE TABLE ' + table_info.table_name + ' (\n'
#error box
error_box = urwid.AttrMap( urwid.Text(u""), 'main_sel')
#signal handler for try again button
def try_again(button):
second_createtable(frame, body, main_body, user_info, table_info)
#signal handler for the next attribute button
def next_atr(button):
if table_info.atr_name == "" or table_info.atr_type == "":
error_box.original_widget = urwid.AttrWrap( urwid.Text(u"You must enter a name and type."), 'error')
else:
error_box.original_widget = urwid.AttrWrap( urwid.Text(u""), 'main_sel')
table_info.query_string += table_info.atr_name + ' ' + table_info.atr_type
if table_info.atr_null == True:
table_info.query_string += ' NOT NULL'
if table_info.atr_primarykey == True:
table_info.query_string += ' PRIMARY KEY'
if table_info.atr_unique == True:
table_info.query_string += ' UNIQUE'
#increment count to reflect new addition of data
table_info.count += 1
if table_info.count < table_info.table_fields:
#call function to bring up next form
next_form()
else:
#call function to execute create table query
create_table()
#next button
atr_next_btn = urwid.AttrWrap( urwid.Button(u"Next", next_atr), 'main_sel', 'main_self')
atr_next_btn = urwid.WidgetPlaceholder(atr_next_btn)
#signal handler for edit field events
def edit_change_event(self, text):
if self.caption == "Name: ":
table_info.atr_name = text
elif self.caption == "Type: ":
table_info.atr_type = text
#widget for attribute name edit field
atr_name_edit = urwid.Edit(u"Name: ", "")
urwid.connect_signal(atr_name_edit, 'change', edit_change_event)
atr_name_edit = urwid.AttrWrap(atr_name_edit, 'main_sel', 'main_self')
#widget for type edit field
atr_type_edit = urwid.Edit(u"Type: ", "")
urwid.connect_signal(atr_type_edit, 'change', edit_change_event)
atr_type_edit = urwid.AttrWrap(atr_type_edit, 'main_sel', 'main_self')
#signal handler for checkbox
def checkbox_change(self, state):
if state == True:
table_info.atr_null = True
else:
table_info.atr_null = False
#widget for null checkbox
null_checkbox = urwid.CheckBox(u"Not Null", state=False, on_state_change=checkbox_change)
null_checkbox = urwid.AttrWrap(null_checkbox, 'main_sel', 'main_self')
#signal handler for radio buttons
def radio_change(self, state):
if self.label == "Primary Key":
if state == True:
table_info.atr_primarykey = True
table_info.atr_unique = False
table_info.atr_none = False
elif self.label == "Unique":
if state == True:
table_info.atr_primarykey = False
table_info.atr_unique = True
table_info.atr_none = False
elif self.label == "None":
if state == True:
table_info.atr_primarykey = False
table_info.atr_unique = False
table_info.atr_none = True
#widgets for radio buttons
radio_list = []
primarykey_radio = urwid.AttrWrap( urwid.RadioButton(radio_list, u"Primary Key", False, on_state_change=radio_change), 'main_sel', 'main_self')
unique_radio = urwid.AttrWrap( urwid.RadioButton(radio_list, u"Unique", False, on_state_change=radio_change), 'main_sel', 'main_self')
none_radio = urwid.AttrWrap( urwid.RadioButton(radio_list, u"None", True, on_state_change=radio_change), 'main_sel', 'main_self')
#create button placeholder
table_create_btn = urwid.WidgetPlaceholder( urwid.Text(u""))
#signal handler for create button
def create_table():
table_info.query_string += '\n);'
#run query
query_status = user_info.db_obj.runquery(user_info.db_conn, table_info.query_string, False)
if query_status['success']:
#query was successful, show success message and change view
frame.footer = urwid.AttrWrap( urwid.Text(u" Table created successfully"), 'header')
mainview.show_main_view(frame, body, user_info)
else:
#query failed, show error message
error_box.original_widget = urwid.AttrWrap( urwid.Text(
[u"Query Failed. Select 'Try Again' below to re-enter attribute information, or 'Create Table' above to start over.\n\n", query_status['data'], "\nQUERY: ", table_info.query_string]), 'error')
      #if only 1 attribute was entered, the Next button is still showing; replace it
if table_info.table_fields == 1:
attribute_box.focus_position = 2
atr_next_btn.original_widget = urwid.AttrWrap( urwid.Text(u""), 'main_sel')
#clear out create table button and make it try again button
table_create_btn.original_widget = urwid.AttrWrap( urwid.Button(u"Try Again", try_again), 'main_sel', 'main_self')
#controls the looping nature of the repetivie process of entering in data for attributes
def next_form():
#clear form
atr_name_edit.set_edit_text(u"")
atr_type_edit.set_edit_text(u"")
null_checkbox.set_state(False)
none_radio.set_state(True)
#change focus to name input field
attribute_box.focus_position = 2
#change attribute count to show current attribute
edit_num.set_text([u"Now editing attribute number: ", str(table_info.count + 1), ' / ', str(table_info.table_fields)])
#keep processing data
table_info.query_string += ',\n'
if table_info.count == table_info.table_fields - 1:
#this is the last attribute to edit
#remove next button and replace with create button
atr_next_btn.original_widget = urwid.AttrWrap( urwid.Text(u""), 'main_sel')
table_create_btn.original_widget = urwid.AttrWrap( urwid.Button(u"Create", next_atr), 'main_sel', 'main_self')
attribute_box = urwid.Pile([
error_box,
blank,
atr_name_edit,
blank,
atr_type_edit,
blank,
null_checkbox,
blank,
primarykey_radio,
unique_radio,
none_radio,
blank,
urwid.Padding(atr_next_btn, left=15, width=10)
])
create_attribute = urwid.WidgetPlaceholder( urwid.Padding(
urwid.Pile([
text_1,
blank,
edit_num,
blank,
urwid.LineBox(attribute_box),
blank,
urwid.Padding(table_create_btn, left=5, width=10)
]), left=5, right=5))
main_body.original_widget = create_attribute
| mit | -7,058,413,767,941,658,000 | 34.541353 | 201 | 0.663317 | false |
lrocheWB/navitia | source/jormungandr/jormungandr/realtime_schedule/tests/synthese_test.py | 1 | 5246 | # coding=utf-8
# Copyright (c) 2001-2016, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
import datetime
from dateutil.parser import parse
import mock
import pytz
from jormungandr.realtime_schedule.synthese import Synthese
import validators
from jormungandr.realtime_schedule.tests.utils import MockRoutePoint
def make_url_test():
synthese = Synthese(id='tata', timezone='Europe/Paris', service_url='http://bob.com/')
url = synthese._make_url(MockRoutePoint(route_id='route_tata', line_id='line_toto', stop_id='stop_tutu'))
# it should be a valid url
assert validators.url(url)
assert url.startswith('http://bob.com/')
assert 'SERVICE=tdg' in url
assert 'roid=stop_tutu' in url
def make_url_invalid_code_test():
"""
test make_url when RoutePoint does not have a mandatory code
we should not get any url
"""
synthese = Synthese(id='tata', timezone='Europe/Paris', service_url='http://bob.com/')
url = synthese._make_url(MockRoutePoint(route_id='route_tata', line_id='line_toto', stop_id=None))
assert url is None
class MockResponse(object):
def __init__(self, data, status_code, url, *args, **kwargs):
self.content = data
self.text = data
self.status_code = status_code
self.url = url
class MockRequests(object):
def __init__(self, responses):
self.responses = responses
def get(self, url, *args, **kwargs):
return MockResponse(self.responses[url][0], self.responses[url][1], url)
def mock_good_response():
return """
<timeTable>
<journey routeId="route_tata" dateTime="2016-Mar-29 13:37:00" realTime="yes">
<stop id="stop_tutu"/>
</journey>
<journey routeId="route_tata" dateTime="2016-Mar-29 13:47:00" realTime="yes">
<stop id="stop_tutu"/>
</journey>
<journey routeId="route_toto" dateTime="2016-Mar-29 13:48:00" realTime="yes">
<stop id="stop_tutu"/>
</journey>
<journey routeId="route_tata" dateTime="2016-Mar-29 13:57:00" realTime="yes">
<stop id="stop_tutu"/>
</journey>
</timeTable>
"""
def next_passage_for_route_point_test():
"""
test the whole next_passage_for_route_point
mock the http call to return a good response, we should get some next_passages
"""
synthese = Synthese(id='tata', timezone='UTC', service_url='http://bob.com/')
mock_requests = MockRequests({
'http://bob.com/?SERVICE=tdg&roid=stop_tutu':
(mock_good_response(), 200)
})
route_point = MockRoutePoint(route_id='route_tata', line_id='line_toto', stop_id='stop_tutu')
with mock.patch('requests.get', mock_requests.get):
passages = synthese.next_passage_for_route_point(route_point)
assert len(passages) == 3
assert passages[0].datetime == datetime.datetime(2016, 3, 29, 13, 37, tzinfo=pytz.UTC)
assert passages[1].datetime == datetime.datetime(2016, 3, 29, 13, 47, tzinfo=pytz.UTC)
assert passages[2].datetime == datetime.datetime(2016, 3, 29, 13, 57, tzinfo=pytz.UTC)
def next_passage_for_route_point_failure_test():
"""
test the whole next_passage_for_route_point
the timeo's response is in error (status = 404), we should get 'None'
"""
synthese = Synthese(id='tata', timezone='UTC', service_url='http://bob.com/')
mock_requests = MockRequests({
'http://bob.com/?SERVICE=tdg&roid=stop_tutu':
(mock_good_response(), 404)
})
route_point = MockRoutePoint(route_id='route_tata', line_id='line_toto', stop_id='stop_tutu')
with mock.patch('requests.get', mock_requests.get):
passages = synthese.next_passage_for_route_point(route_point)
assert passages is None
def status_test():
synthese = Synthese(id='tata', timezone='UTC', service_url='http://bob.com/')
status = synthese.status()
assert status['id'] == 'tata'
| agpl-3.0 | 7,979,581,180,124,000,000 | 32.414013 | 109 | 0.672512 | false |
feigaochn/leetcode | p737_sentence_similarity_ii.py | 2 | 3720 | #!/usr/bin/env python
# coding: utf-8
"""
Given two sentences words1, words2 (each represented as an array of strings), and a list of similar word pairs pairs, determine if two sentences are similar.
For example, words1 = ["great", "acting", "skills"] and words2 = ["fine", "drama", "talent"] are similar, if the similar word pairs are pairs = [["great", "good"], ["fine", "good"], ["acting","drama"], ["skills","talent"]].
Note that the similarity relation is transitive. For example, if "great" and "good" are similar, and "fine" and "good" are similar, then "great" and "fine" are similar.
Similarity is also symmetric. For example, "great" and "fine" being similar is the same as "fine" and "great" being similar.
Also, a word is always similar to itself. For example, the sentences words1 = ["great"], words2 = ["great"], pairs = [] are similar, even though there are no specified similar word pairs.
Finally, sentences can only be similar if they have the same number of words. So a sentence like words1 = ["great"] can never be similar to words2 = ["doubleplus","good"].
Note:
The length of words1 and words2 will not exceed 1000.
The length of pairs will not exceed 2000.
The length of each pairs[i] will be 2.
The length of each words[i] and pairs[i][j] will be in the range [1, 20].
"""
class Solution(object):
def areSentencesSimilarTwo(self, words1, words2, pairs):
"""
:type words1: List[str]
:type words2: List[str]
:type pairs: List[List[str]]
:rtype: bool
"""
all_words = list(set(words1) | set(words2) | set([w for p in pairs for w in p])) + [None]
parent = dict(zip(all_words, all_words))
def grand_parent(w):
if parent[w] == w:
return w
else:
gp = grand_parent(parent[w])
parent[w] = gp
return gp
# print(parent)
for w1, w2 in pairs:
gpw1 = grand_parent(w1)
gpw2 = grand_parent(w2)
if gpw1 < gpw2:
parent[gpw2] = gpw1
else:
parent[gpw1] = gpw2
from itertools import zip_longest
return all(grand_parent(w1) == grand_parent(w2)
for w1, w2 in zip_longest(words1, words2))
if __name__ == '__main__':
sol = Solution().areSentencesSimilarTwo
print(sol(
words1=["great", "acting", "skills"],
words2=["fine", "drama", "talent"],
pairs=[["great", "good"], ["fine", "good"], ["acting", "drama"], ["skills", "talent"]]))
print(sol(
words1=["great", "acting", "skills", "talent"],
words2=["fine", "drama", "talent"],
pairs=[["great", "good"], ["fine", "good"], ["acting", "drama"], ["skills", "talent"]]))
print(sol(["an", "extraordinary", "meal"],
["one", "good", "dinner"],
[["great", "good"], ["extraordinary", "good"], ["well", "good"], ["wonderful", "good"],
["excellent", "good"], ["fine", "good"], ["nice", "good"], ["any", "one"], ["some", "one"],
["unique", "one"], ["the", "one"], ["an", "one"], ["single", "one"], ["a", "one"], ["truck", "car"],
["wagon", "car"], ["automobile", "car"], ["auto", "car"], ["vehicle", "car"], ["entertain", "have"],
["drink", "have"], ["eat", "have"], ["take", "have"], ["fruits", "meal"], ["brunch", "meal"],
["breakfast", "meal"], ["food", "meal"], ["dinner", "meal"], ["super", "meal"], ["lunch", "meal"],
["possess", "own"], ["keep", "own"], ["have", "own"], ["extremely", "very"], ["actually", "very"],
["really", "very"], ["super", "very"]]), '?= True')
| mit | 2,157,045,802,710,911,500 | 45.5 | 223 | 0.548387 | false |