code | repo_name | path | language | license | size
---|---|---|---|---|---|
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
from sickbeard.common import countryList, showLanguages
from sickbeard.helpers import sanitizeSceneName
from sickbeard.scene_exceptions import get_scene_exceptions
from sickbeard import logger
from sickbeard import db
import re
import datetime
import string
from name_parser.parser import NameParser, InvalidNameException
resultFilters = ["sub(pack|s|bed)", "nlsub(bed|s)?", "swesub(bed)?",
"(dir|sample|nfo)fix", "sample", "(dvd)?extras"]
def filterBadReleases(name, showLang=u"en"):
"""
Filters out non-English and other unwanted releases by comparing them
against the resultFilters contents.
name: the release name to check
showLang: the show's language code; dubbed releases are also filtered out for English shows
Returns: True if the release name is OK, False if it's bad.
"""
additionalFilters = []
if showLang == u"en":
additionalFilters.append("dub(bed)?")
try:
fp = NameParser()
parse_result = fp.parse(name)
except InvalidNameException:
logger.log(u"Unable to parse the filename "+name+" into a valid episode", logger.WARNING)
return False
# use the extra info and the scene group to filter against
check_string = ''
if parse_result.extra_info:
check_string = parse_result.extra_info
if parse_result.release_group:
if check_string:
check_string = check_string + '-' + parse_result.release_group
else:
check_string = parse_result.release_group
# if there's no info after the season info then assume it's fine
if not check_string:
check_string = name
# if any of the bad strings are in the name then say no
if sickbeard.IGNORE_WORDS == "":
ignore_words = "ztreyfgut"  # nonsense placeholder that will never match a release name
else:
ignore_words = sickbeard.IGNORE_WORDS
for x in resultFilters + ignore_words.split(',') + additionalFilters:
if x == showLanguages.get(showLang):
continue
if re.search('(^|[\W_])'+x+'($|[\W_])', check_string, re.I):
logger.log(u"Invalid scene release: "+name+" contains "+x+", ignoring it", logger.DEBUG)
return False
return True
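# A rough illustration (hypothetical release names, assuming NameParser can parse them):
# filterBadReleases('Show.Name.S01E01.720p.HDTV-GRP') -> True
# filterBadReleases('Show.Name.S01E01.HDTV.swesub-GRP') -> False (matches "swesub(bed)?")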
def sceneToNormalShowNames(name):
"""
Takes a show name from a scene dirname and converts it to a more "human-readable" format.
name: The show name to convert
Returns: a list of all the possible "normal" names
"""
if not name:
return []
name_list = [name]
# generate variants using both "and" and "&"
new_name = re.sub('(?i)([\. ])and([\. ])', '\\1&\\2', name)  # (?i) already makes this case-insensitive; re.sub's 4th positional arg is count, not flags
if new_name not in name_list:
name_list.append(new_name)
results = []
for cur_name in name_list:
# add brackets around the year
results.append(re.sub('(\D)(\d{4})$', '\\1(\\2)', cur_name))
# add brackets around the country
country_match_str = '|'.join(countryList.values())
results.append(re.sub('(?i)([. _-])('+country_match_str+')$', '\\1(\\2)', cur_name))
results += name_list
return list(set(results))
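# A sketch of the expected output (hypothetical input):
# sceneToNormalShowNames('Show.Name.and.Co.2010') yields, in some order:
# 'Show.Name.and.Co.2010', 'Show.Name.&.Co.2010',
# 'Show.Name.and.Co.(2010)', 'Show.Name.&.Co.(2010)'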
def makeSceneShowSearchStrings(show):
showNames = allPossibleShowNames(show)
# scenify the names
return map(sanitizeSceneName, showNames)
def makeSceneSeasonSearchString (show, segment, extraSearchType=None):
myDB = db.DBConnection()
if show.air_by_date:
numseasons = 0
# the search string for air by date shows is just the date segment
seasonStrings = [segment]
else:
numseasonsSQLResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [show.tvdbid])
numseasons = int(numseasonsSQLResult[0][0])
seasonStrings = ["S%02d" % segment]
# since nzbmatrix allows more than one search per request we search SxEE results too
if extraSearchType == "nzbmatrix":
seasonStrings.append("%ix" % segment)
showNames = set(makeSceneShowSearchStrings(show))
toReturn = []
term_list = []
# search each show name
for curShow in showNames:
# most providers work the same way
if not extraSearchType:
# if there's only one season then we can just use the show name straight up
if numseasons == 1:
toReturn.append(curShow)
# for providers that don't allow multiple searches in one request we only search for Sxx style stuff
else:
for cur_season in seasonStrings:
toReturn.append(curShow + "." + cur_season)
# nzbmatrix is special, we build a search string just for them
elif extraSearchType == "nzbmatrix":
if numseasons == 1:
toReturn.append('"'+curShow+'"')
elif numseasons == 0:
toReturn.append('"'+curShow+' '+str(segment).replace('-',' ')+'"')
else:
term_list = [x+'*' for x in seasonStrings]
if show.air_by_date:
term_list = ['"'+x+'"' for x in term_list]
toReturn.append('"'+curShow+'"')
if extraSearchType == "nzbmatrix":
toReturn = ['+('+','.join(toReturn)+')']
if term_list:
toReturn.append('+('+','.join(term_list)+')')
return toReturn
def makeSceneSearchString (episode):
myDB = db.DBConnection()
numseasonsSQLResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [episode.show.tvdbid])
numseasons = int(numseasonsSQLResult[0][0])
numepisodesSQLResult = myDB.select("SELECT COUNT(episode) as numepisodes FROM tv_episodes WHERE showid = ? and season != 0", [episode.show.tvdbid])
numepisodes = int(numepisodesSQLResult[0][0])
# see if we should use dates instead of episodes
if episode.show.air_by_date and episode.airdate != datetime.date.fromordinal(1):
epStrings = [str(episode.airdate)]
else:
epStrings = ["S%02iE%02i" % (int(episode.season), int(episode.episode)),
"%ix%02i" % (int(episode.season), int(episode.episode))]
# for single-season shows just search for the show name -- if total ep count (exclude s0) is less than 11
# due to the amount of qualities and releases, it is easy to go over the 50 result limit on rss feeds otherwise
if numseasons == 1 and numepisodes < 11:
epStrings = ['']
showNames = set(makeSceneShowSearchStrings(episode.show))
toReturn = []
for curShow in showNames:
for curEpString in epStrings:
toReturn.append(curShow + '.' + curEpString)
return toReturn
def isGoodResult(name, show, log=True):
"""
Use an automatically-created regex to make sure the result actually is the show it claims to be
"""
all_show_names = allPossibleShowNames(show)
showNames = map(sanitizeSceneName, all_show_names) + all_show_names
for curName in set(showNames):
escaped_name = re.sub('\\\\[\\s.-]', '\W+', re.escape(curName))
if show.startyear:
escaped_name += "(?:\W+"+str(show.startyear)+")?"
curRegex = '^' + escaped_name + '\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(\d|[ivx]))|(Sea|sai)son\W+\d+\W+|E\d+\W+)'
if log:
logger.log(u"Checking if show "+name+" matches " + curRegex, logger.DEBUG)
match = re.search(curRegex, name, re.I)
if match:
logger.log(u"Matched "+curRegex+" to "+name, logger.DEBUG)
return True
if log:
logger.log(u"Provider gave result "+name+" but that doesn't seem like a valid result for "+show.name+" so I'm ignoring it")
return False
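# For example (hypothetical): for a show named "Show Name" the generated regex
# accepts results like "Show.Name.S01E01.HDTV" or "Show Name 1x01"
# but rejects "Other Show S01E01", since the show name must match from the start.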
def allPossibleShowNames(show):
"""
Figures out every possible variation of the name for a particular show. Includes TVDB name, TVRage name,
country codes on the end, eg. "Show Name (AU)", and any scene exception names.
show: a TVShow object that we should get the names of
Returns: a list of all the possible show names
"""
showNames = [show.name]
for name in get_scene_exceptions(show.tvdbid):
if name not in showNames:
showNames.append(name)
# if we have a tvrage name then use it
if show.tvrname != "" and show.tvrname != None:
showNames.append(show.tvrname)
newShowNames = []
country_list = dict(countryList)  # copy first so we don't mutate the shared module-level countryList
country_list.update(dict(zip(countryList.values(), countryList.keys())))
# if we have "Show Name Australia" or "Show Name (Australia)" this will add "Show Name (AU)" for
# any countries defined in common.countryList
# (and vice versa)
for curName in set(showNames):
if not curName:
continue
for curCountry in country_list:
if curName.endswith(' '+curCountry):
newShowNames.append(curName.replace(' '+curCountry, ' ('+country_list[curCountry]+')'))
elif curName.endswith(' ('+curCountry+')'):
newShowNames.append(curName.replace(' ('+curCountry+')', ' ('+country_list[curCountry]+')'))
showNames += newShowNames
return showNames
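# e.g. (hypothetical): for a show named "Show Name (AU)" the list also gains
# "Show Name (Australia)" (and vice versa), on top of the TVDB name, the
# TVRage name, and any scene exceptions.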
| bob123bob/Sick-Beard | sickbeard/show_name_helpers.py | Python | gpl-3.0 | 9,869 |
# -*- coding: utf-8 -*-
'''
Model file module, so that model files are only loaded once when imported
'''
import os
import sys
import tensorflow as tf
from facenet.src import facenet
from facenet.src.align import detect_face
fileDir = os.path.dirname(os.path.realpath(__file__))
facenetDir = os.path.join(fileDir, 'facenet')
facenetModelDir = os.path.join(facenetDir, 'src', 'align',)
session = None
graph = None
# Actual models used for face detection
pnet = None
rnet = None
onet = None
graph = tf.Graph()
session = tf.Session(graph=graph) #config=tf.ConfigProto(inter_op_parallelism_threads=24, intra_op_parallelism_threads=24))
with graph.as_default():
with session.as_default():
pnet, rnet, onet = detect_face.create_mtcnn(session, facenetModelDir)
graph.finalize()
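# A hypothetical usage sketch: downstream code imports the module-level nets once
# and passes them to facenet's detector (the numeric parameters are illustrative):
#
# from MtcnnModel import pnet, rnet, onet
# from facenet.src.align import detect_face
# boxes, points = detect_face.detect_face(img, 20, pnet, rnet, onet, [0.6, 0.7, 0.7], 0.709)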
| lodemo/CATANA | src/face_recognition/MtcnnModel.py | Python | mit | 794 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test for RequireLoginEverywhereMiddleware in middleware.py
#
# This test uses "nose"-style testing (no need for a TestCase),
# and nose-style assertions.
from nose.tools import *
from django.test.client import Client
import django
def test_require_login():
c = Client()
# We're not logged in, so expect a redirection.
response = c.get('/profile')
assert_true(isinstance(response, django.http.HttpResponseRedirect), "Expected redirect")
assert_equal("/hue/accounts/login?next=/profile", response["Location"])
# AllowAllBackend should let us in.
c.login(username="test", password="test")
# And now we shouldn't need to be redirected.
response = c.get('/', follow=True)
assert_equal(200, response.status_code)
def test_ajax_require_login():
c = Client()
response = c.get('/profile',
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert_equal("LOGIN_REQUIRED", response["X-Hue-Middleware-Response"],
"Expected magic header from middleware")
| kawamon/hue | desktop/core/src/desktop/require_login_test.py | Python | apache-2.0 | 1,795 |
"""
Shared utilities for codejail tests.
"""
from .. import jail_code
class ResetJailCodeStateMixin:
"""
The jail_code module has global state.
Use this mixin to reset jail_code to its initial state before running a test function,
and then restore the existing state once the test function is complete.
"""
def setUp(self):
"""
Reset global variables back to defaults, copying and saving existing values.
"""
super().setUp()
# pylint: disable=invalid-name
self._COMMANDS = jail_code.COMMANDS
self._LIMITS = jail_code.LIMITS
self._LIMIT_OVERRIDES = jail_code.LIMIT_OVERRIDES
jail_code.COMMANDS = {}
jail_code.LIMITS = jail_code.DEFAULT_LIMITS.copy()
jail_code.LIMIT_OVERRIDES = {}
def tearDown(self):
"""
Restore global variables to the values they had before running the test.
"""
super().tearDown()
jail_code.COMMANDS = self._COMMANDS
jail_code.LIMITS = self._LIMITS
jail_code.LIMIT_OVERRIDES = self._LIMIT_OVERRIDES
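# A minimal usage sketch (hypothetical test; assumes jail_code.configure() exists
# with this signature in this version of codejail):
#
# class ConfigureTest(ResetJailCodeStateMixin, unittest.TestCase):
#     def test_configure_python(self):
#         jail_code.configure('python', '/usr/bin/python3')  # mutates jail_code.COMMANDS
#         self.assertIn('python', jail_code.COMMANDS)  # restored again by tearDown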
| edx/codejail | codejail/tests/util.py | Python | apache-2.0 | 1,093 |
#!/usr/bin/env python
from RunBase import *
import time
T=ParseModel("LCDM")
#T.setMnu(0.0)
L=ParseDataset("BBAO+CMBP+SN")#+CMBP")
T.printFreeParameters()
L.setTheory(T)
print T.WangWangVec()
t0 = time.time()
for i in range(30):
print i
loglike = L.loglike()
t = time.time() - t0
print loglike, t
| slosar/april | attick/SpeedTest.py | Python | gpl-2.0 | 303 |
from urllib.parse import urlparse, urljoin
from flask import request, url_for, redirect, session
from flask_wtf import Form
from wtforms import TextField, PasswordField, HiddenField
from wtforms.validators import InputRequired
class RedirectForm(Form):
next = HiddenField()
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
if not self.next.data:
self.next.data = get_redirect_target() or ''
def redirect(self, endpoint='about', **values):
if is_safe_url(self.next.data):
return redirect(self.next.data)
target = get_redirect_target()
return redirect(target or url_for(endpoint, **values))
class LoginForm(RedirectForm):
username = TextField('Username', validators=[InputRequired()])
password = PasswordField('Password', validators=[InputRequired()])
####################
# helper functions #
####################
def is_safe_url(target):
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ('http', 'https') and \
ref_url.netloc == test_url.netloc
def get_redirect_target():
if 'next_url' in session:
target = session['next_url']
else:
target = request.referrer
if is_safe_url(target):
return target
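# e.g. with request.host_url == 'http://example.com/':
# is_safe_url('/profile') -> True (urljoin resolves it to the same netloc)
# is_safe_url('http://evil.com/phish') -> False (netloc differs)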
| heejongahn/hjlog | hjlog/forms/login.py | Python | mit | 1,344 |
import csv
with open('historical_data.csv', 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
if int(row["TIME"]) == 0 :
save = float(row["Speed"])
else:
if(float(row["Speed"]) - save >= 0.1*save or -float(row["Speed"]) + save >= 0.1*save ):
print row["SER"] + "->" , int(row["TIME"])-1
save = float(row["Speed"])
| manglakaran/TrafficKarmaSent | extras/check_break.py | Python | mit | 361 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListAttestors
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-binaryauthorization
# [START binaryauthorization_v1beta1_generated_BinauthzManagementServiceV1Beta1_ListAttestors_sync]
from google.cloud import binaryauthorization_v1beta1
def sample_list_attestors():
# Create a client
client = binaryauthorization_v1beta1.BinauthzManagementServiceV1Beta1Client()
# Initialize request argument(s)
request = binaryauthorization_v1beta1.ListAttestorsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_attestors(request=request)
# Handle the response
for response in page_result:
print(response)
# [END binaryauthorization_v1beta1_generated_BinauthzManagementServiceV1Beta1_ListAttestors_sync]
| googleapis/python-binary-authorization | samples/generated_samples/binaryauthorization_v1beta1_generated_binauthz_management_service_v1_beta1_list_attestors_sync.py | Python | apache-2.0 | 1,638 |
import bs4
import copy
import datetime
import functools
import time
from django.conf import settings as django_settings
from django.core import management
from django.core import serializers
import django.core.mail
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from django.utils import translation
from askbot.tests import utils
from askbot.tests.utils import with_settings
from askbot import models
from askbot import mail
from askbot.conf import settings as askbot_settings
from askbot import const
from askbot.models.question import Thread
TO_JSON = functools.partial(serializers.serialize, 'json')
def email_alert_test(test_func):
"""decorator for test methods in
:class:`~askbot.tests.email_alert_tests.EmailAlertTests`
wraps tests with a generic sequence of testing
email alerts on updates to anything relating to
given question
"""
@functools.wraps(test_func)
def wrapped_test(test_object, *args, **kwargs):
func_name = test_func.__name__
if func_name.startswith('test_'):
test_name = func_name.replace('test_', '', 1)
#run the main code of the test function
test_func(test_object)
#if visit_timestamp is set,
#target user will visit the question at that time
test_object.maybe_visit_question()
test_object.send_alerts()
test_object.check_results(test_name)
else:
raise ValueError('test method names must have prefix "test_"')
return wrapped_test
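# In effect, each decorated test_* method runs as: execute the test body,
# optionally visit the question (if visit_timestamp is set), run the
# send_email_alerts command, then compare the outbox against
# expected_results['<name>'], where <name> is the method name minus 'test_'.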
def setup_email_alert_tests(setup_func):
@functools.wraps(setup_func)
def wrapped_setup(test_object, *args, **kwargs):
#empty email subscription schedule
#no email is sent
test_object.notification_schedule = \
copy.deepcopy(models.EmailFeedSetting.NO_EMAIL_SCHEDULE)
#timestamp to use for the setup
#functions
test_object.setup_timestamp = datetime.datetime.now()
#timestamp to use for the question visit
#by the target user
#if this timestamp is None then there will be no visit
#otherwise question will be visited by the target user
#at that time
test_object.visit_timestamp = None
#dictionary to hold expected results for each test
#actual data is initialized in the code just before the function
#or in the body of the subclass
test_object.expected_results = dict()
#do not follow by default (do not use q_sel type subscription)
test_object.follow_question = False
#fill out expected result for each test
test_object.expected_results['q_ask'] = {'message_count': 0, }
test_object.expected_results['q_ask_delete_answer'] = {'message_count': 0, }
test_object.expected_results['question_comment'] = {'message_count': 0, }
test_object.expected_results['question_comment_delete'] = {'message_count': 0, }
test_object.expected_results['answer_comment'] = {'message_count': 0, }
test_object.expected_results['answer_delete_comment'] = {'message_count': 0, }
test_object.expected_results['mention_in_question'] = {'message_count': 0, }
test_object.expected_results['mention_in_answer'] = {'message_count': 0, }
test_object.expected_results['question_edit'] = {'message_count': 0, }
test_object.expected_results['answer_edit'] = {'message_count': 0, }
test_object.expected_results['question_and_answer_by_target'] = {'message_count': 0, }
test_object.expected_results['q_ans'] = {'message_count': 0, }
test_object.expected_results['q_ans_new_answer'] = {'message_count': 0, }
#this function is expected to contain a difference between this
#one and the desired setup within the concrete test
setup_func(test_object)
#must call this after setting up the notification schedule
#because it is needed in setUpUsers() function
test_object.setUpUsers()
return wrapped_setup
class SubjectLineTests(TestCase):
"""Tests for the email subject line"""
def test_set_prefix(self):
"""set prefix and see if it is there
"""
askbot_settings.update('EMAIL_SUBJECT_PREFIX', 'test prefix')
subj = mail.prefix_the_subject_line('hahah')
self.assertEquals(subj, 'test prefix hahah')
def test_can_disable_prefix(self):
"""set prefix to empty string and make sure
that the subject line is not altered"""
askbot_settings.update('EMAIL_SUBJECT_PREFIX', '')
subj = mail.prefix_the_subject_line('hahah')
self.assertEquals(subj, 'hahah')
class EmailAlertTests(TestCase):
"""Base class for testing delayed Email notifications
that are triggered by the send_email_alerts
command
this class tests cases where target user has no subscriptions
that is all subscriptions off
subclasses should redefine initial data via the static
class members
this class also defines a few utility methods that do
not run any tests themselves
class variables:
* notification_schedule
* setup_timestamp
* visit_timestamp
* expected_results
should be set in subclasses to reuse testing code
"""
def send_alerts(self):
"""runs the send_email_alerts management command
and makes a shortcut access to the outbox
"""
#make sure that we are not sending email for real
#this setting must be present in settings.py
assert(
django_settings.EMAIL_BACKEND == 'django.core.mail.backends.locmem.EmailBackend'
)
management.call_command('send_email_alerts')
@setup_email_alert_tests
def setUp(self):
"""generic pre-test setup method:
this function is empty - because its intended content is
entirely defined by the decorator
the ``setUp()`` function in any subclass must only enter differences
between the default version (defined in the decorator) and the
desired version in the "real" test
"""
translation.activate('en')
pass
def setUpUsers(self):
self.other_user = utils.create_user(
username = 'other',
email = '[email protected]',
date_joined = self.setup_timestamp,
status = 'm'
)
self.target_user = utils.create_user(
username = 'target',
email = '[email protected]',
notification_schedule = self.notification_schedule,
date_joined = self.setup_timestamp,
status = 'm'
)
def post_comment(
self,
author = None,
parent_post = None,
body_text = 'dummy test comment',
timestamp = None
):
"""posts and returns a comment to parent post, uses
now timestamp if not given, dummy body_text
author is required
"""
if timestamp is None:
timestamp = self.setup_timestamp
comment = author.post_comment(
parent_post = parent_post,
body_text = body_text,
timestamp = timestamp,
)
return comment
def edit_post(
self,
author = None,
post = None,
timestamp = None,
body_text = 'edited body text',
):
"""todo: this method may also edit other stuff
like post title and tags - in the case when post is
of type question
"""
if timestamp is None:
timestamp = self.setup_timestamp
author.edit_post(
post=post,
timestamp=timestamp,
body_text=body_text,
revision_comment='nothing serious'
)
def post_question(
self,
author = None,
timestamp = None,
title = 'test question title',
body_text = 'test question body',
tags = 'test',
):
"""post a question with dummy content
and return it
"""
if timestamp is None:
timestamp = self.setup_timestamp
self.question = author.post_question(
title = title,
body_text = body_text,
tags = tags,
timestamp = timestamp
)
if self.follow_question:
self.target_user.follow_question(self.question)
return self.question
def maybe_visit_question(self, user = None):
"""visits question on behalf of a given user and applies
a timestamp set in the class attribute ``visit_timestamp``
if ``visit_timestamp`` is None, then visit is skipped
parameter ``user`` is optional if not given, the visit will occur
on behalf of the user stored in the class attribute ``target_user``
"""
if self.visit_timestamp:
if user is None:
user = self.target_user
user.visit_post(
question = self.question,
timestamp = self.visit_timestamp
)
def post_answer(
self,
question = None,
author = None,
body_text = 'test answer body',
timestamp = None,
follow = None,#None - do nothing, True/False - follow/unfollow
):
"""post answer with dummy content and return it
"""
if timestamp is None:
timestamp = self.setup_timestamp
if follow is None:
if author.is_following_question(question):
follow = True
else:
follow = False
elif follow not in (True, False):
raise ValueError('"follow" may be only None, True or False')
return author.post_answer(
question = question,
body_text = body_text,
timestamp = timestamp,
follow = follow,
)
def check_results(self, test_key = None):
if test_key is None:
raise ValueError('test_key parameter is required')
expected = self.expected_results[test_key]
outbox = django.core.mail.outbox
error_message = 'emails_sent=%d, expected=%d, function=%s.test_%s' % (
len(outbox),
expected['message_count'],
self.__class__.__name__,
test_key,
)
#compares number of emails in the outbox and
#the expected message count for the current test
self.assertEqual(len(outbox), expected['message_count'], error_message)
if expected['message_count'] > 0:
if len(outbox) > 0:
error_message = 'expected recipient %s found %s' % \
(self.target_user.email, outbox[0].recipients()[0])
#verify that target user receives the email
self.assertEqual(
outbox[0].recipients()[0],
self.target_user.email,
error_message
)
def proto_post_answer_comment(self):
"""base method for use in some tests
"""
question = self.post_question(
author = self.other_user
)
answer = self.post_answer(
question = question,
author = self.target_user
)
comment = self.post_comment(
parent_post = answer,
author = self.other_user,
)
return comment
@email_alert_test
def test_answer_comment(self):
"""target user posts answer and other user posts a comment
to the answer
"""
self.proto_post_answer_comment()
@email_alert_test
def test_answer_delete_comment(self):
comment = self.proto_post_answer_comment()
comment.author.delete_comment(comment = comment)
@email_alert_test
def test_question_edit(self):
question = self.post_question(author=self.target_user)
self.edit_post(post=question, author=self.other_user)
self.question = question
@email_alert_test
def test_answer_edit(self):
question = self.post_question(
author = self.target_user
)
answer = self.post_answer(
question = question,
author = self.target_user
)
self.edit_post(
post = answer,
author = self.other_user
)
self.question = question
@email_alert_test
def test_question_and_answer_by_target(self):
question = self.post_question(
author = self.target_user
)
answer = self.post_answer(
question = question,
author = self.target_user
)
self.question = question
def proto_question_comment(self):
question = self.post_question(
author = self.target_user,
)
comment = self.post_comment(
author = self.other_user,
parent_post = question,
)
return comment
@email_alert_test
def test_question_comment(self):
"""target user posts question other user posts a comment
target user does or does not receive email notification
depending on the setup parameters
in the base class user does not receive a notification
"""
self.proto_question_comment()
@email_alert_test
def test_question_comment_delete(self):
"""target user posts question other user posts a comment
target user does or does not receive email notification
depending on the setup parameters
in the base class user does not receive a notification
"""
comment = self.proto_question_comment()
comment.author.delete_comment(comment)
def proto_test_q_ask(self):
"""base method for tests that
have name containing q_ask - i.e. target asks other answers
answer is returned
"""
question = self.post_question(
author = self.target_user,
)
answer = self.post_answer(
question = question,
author = self.other_user,
)
return answer
@email_alert_test
def test_q_ask(self):
"""target user posts question
other user answer the question
"""
self.proto_test_q_ask()
@email_alert_test
def test_q_ask_delete_answer(self):
answer = self.proto_test_q_ask()
self.other_user.delete_post(answer)
@email_alert_test
def test_q_ans(self):
"""other user posts question
target user post answer
"""
question = self.post_question(
author = self.other_user,
)
self.post_answer(
question = question,
author = self.target_user
)
self.question = question
@email_alert_test
def test_q_ans_new_answer(self):
"""other user posts question
target user post answer and other user
posts another answer
"""
question = self.post_question(
author = self.other_user,
)
self.post_answer(
question = question,
author = self.target_user
)
self.post_answer(
question = question,
author = self.other_user
)
self.question = question
@email_alert_test
def test_mention_in_question(self):
question = self.post_question(
author = self.other_user,
body_text = 'hey @target get this'
)
self.question = question
@email_alert_test
def test_mention_in_answer(self):
question = self.post_question(
author = self.other_user,
)
self.post_answer(
question = question,
author = self.other_user,
body_text = 'hey @target check this out'
)
self.question = question
class WeeklyQAskEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_ask'] = 'w'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(14)
self.expected_results['q_ask'] = {'message_count': 1}
self.expected_results['q_ask_delete_answer'] = {'message_count': 0}
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
#local tests
self.expected_results['question_edit_reedited_recently'] = \
{'message_count': 1}
self.expected_results['answer_edit_reedited_recently'] = \
{'message_count': 1}
@email_alert_test
def test_question_edit_reedited_recently(self):
question = self.post_question(
author = self.target_user
)
self.edit_post(
post = question,
author = self.other_user,
)
self.edit_post(
post = question,
author = self.other_user,
timestamp = datetime.datetime.now() - datetime.timedelta(1)
)
@email_alert_test
def test_answer_edit_reedited_recently(self):
question = self.post_question(
author = self.target_user
)
answer = self.post_answer(
question = question,
author = self.other_user,
)
self.edit_post(
post = answer,
author = self.other_user,
timestamp = datetime.datetime.now() - datetime.timedelta(1)
)
class WeeklyMentionsAndCommentsEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['m_and_c'] = 'w'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(14)
self.expected_results['question_comment'] = {'message_count': 1, }
self.expected_results['question_comment_delete'] = {'message_count': 0, }
self.expected_results['answer_comment'] = {'message_count': 1, }
self.expected_results['answer_delete_comment'] = {'message_count': 0, }
self.expected_results['mention_in_question'] = {'message_count': 1, }
self.expected_results['mention_in_answer'] = {'message_count': 1, }
class WeeklyQAnsEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_ans'] = 'w'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(14)
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['q_ans_new_answer'] = {'message_count': 1, }
class InstantQAskEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_ask'] = 'i'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(1)
self.expected_results['q_ask'] = {'message_count': 1}
self.expected_results['q_ask_delete_answer'] = {'message_count': 1}
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
class InstantWholeForumEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_all'] = 'i'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(1)
self.expected_results['q_ask'] = {'message_count': 1, }
self.expected_results['q_ask_delete_answer'] = {'message_count': 1}
self.expected_results['question_comment'] = {'message_count': 1, }
self.expected_results['question_comment_delete'] = {'message_count': 1, }
self.expected_results['answer_comment'] = {'message_count': 2, }
self.expected_results['answer_delete_comment'] = {'message_count': 2, }
self.expected_results['mention_in_question'] = {'message_count': 1, }
self.expected_results['mention_in_answer'] = {'message_count': 2, }
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['question_and_answer_by_target'] = {'message_count': 0, }
self.expected_results['q_ans'] = {'message_count': 1, }
self.expected_results['q_ans_new_answer'] = {'message_count': 2, }
def test_global_subscriber_with_zero_frequency_gets_no_email(self):
user = self.target_user
user.notification_subscriptions.update(frequency='n')
user.email_tag_filter_strategy = const.INCLUDE_ALL
user.save()
self.post_question(author=self.other_user)
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), 0)
class BlankWeeklySelectedQuestionsEmailAlertTests(EmailAlertTests):
"""blank means that this is testing for the absence of email
because questions are not followed as set by default in the
parent class
"""
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_sel'] = 'w'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(14)
self.expected_results['q_ask'] = {'message_count': 1, }
self.expected_results['q_ask_delete_answer'] = {'message_count': 0, }
self.expected_results['question_comment'] = {'message_count': 0, }
self.expected_results['question_comment_delete'] = {'message_count': 0, }
self.expected_results['answer_comment'] = {'message_count': 0, }
self.expected_results['answer_delete_comment'] = {'message_count': 0, }
self.expected_results['mention_in_question'] = {'message_count': 0, }
self.expected_results['mention_in_answer'] = {'message_count': 0, }
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['question_and_answer_by_target'] = {'message_count': 0, }
self.expected_results['q_ans'] = {'message_count': 0, }
self.expected_results['q_ans_new_answer'] = {'message_count': 0, }
class BlankInstantSelectedQuestionsEmailAlertTests(EmailAlertTests):
"""blank means that this is testing for the absence of email
because questions are not followed as set by default in the
parent class
"""
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_sel'] = 'i'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(1)
self.expected_results['q_ask'] = {'message_count': 1, }
self.expected_results['q_ask_delete_answer'] = {'message_count': 1, }
self.expected_results['question_comment'] = {'message_count': 1, }
self.expected_results['question_comment_delete'] = {'message_count': 1, }
self.expected_results['answer_comment'] = {'message_count': 0, }
self.expected_results['answer_delete_comment'] = {'message_count': 0, }
self.expected_results['mention_in_question'] = {'message_count': 0, }
self.expected_results['mention_in_answer'] = {'message_count': 0, }
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['question_and_answer_by_target'] = {'message_count': 0, }
self.expected_results['q_ans'] = {'message_count': 0, }
self.expected_results['q_ans_new_answer'] = {'message_count': 0, }
class LiveWeeklySelectedQuestionsEmailAlertTests(EmailAlertTests):
"""live means that this is testing for the presence of email
as all questions are automatically followed by user here
"""
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_sel'] = 'w'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(14)
self.follow_question = True
self.expected_results['q_ask'] = {'message_count': 1, }
self.expected_results['q_ask_delete_answer'] = {'message_count': 0}
self.expected_results['question_comment'] = {'message_count': 0, }
self.expected_results['question_comment_delete'] = {'message_count': 0, }
self.expected_results['answer_comment'] = {'message_count': 0, }
self.expected_results['answer_delete_comment'] = {'message_count': 0, }
self.expected_results['mention_in_question'] = {'message_count': 1, }
self.expected_results['mention_in_answer'] = {'message_count': 1, }
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['question_and_answer_by_target'] = {'message_count': 0, }
self.expected_results['q_ans'] = {'message_count': 0, }
self.expected_results['q_ans_new_answer'] = {'message_count': 1, }
class LiveInstantSelectedQuestionsEmailAlertTests(EmailAlertTests):
"""live means that this is testing for the presence of email
as all questions are automatically followed by user here
"""
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_sel'] = 'i'
#first posts yesterday
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(1)
self.follow_question = True
self.expected_results['q_ask'] = {'message_count': 1, }
self.expected_results['q_ask_delete_answer'] = {'message_count': 1}
self.expected_results['question_comment'] = {'message_count': 1, }
self.expected_results['question_comment_delete'] = {'message_count': 1, }
self.expected_results['answer_comment'] = {'message_count': 1, }
self.expected_results['answer_delete_comment'] = {'message_count': 1, }
self.expected_results['mention_in_question'] = {'message_count': 0, }
self.expected_results['mention_in_answer'] = {'message_count': 1, }
self.expected_results['question_edit'] = {'message_count': 1, }
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['question_and_answer_by_target'] = {'message_count': 0, }
self.expected_results['q_ans'] = {'message_count': 0, }
self.expected_results['q_ans_new_answer'] = {'message_count': 1, }
class InstantMentionsAndCommentsEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['m_and_c'] = 'i'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(1)
self.expected_results['question_comment'] = {'message_count': 1, }
self.expected_results['question_comment_delete'] = {'message_count': 1, }
self.expected_results['answer_comment'] = {'message_count': 1, }
self.expected_results['answer_delete_comment'] = {'message_count': 1, }
self.expected_results['mention_in_question'] = {'message_count': 1, }
self.expected_results['mention_in_answer'] = {'message_count': 1, }
#specialized local test case
self.expected_results['question_edited_mention_stays'] = {'message_count': 1}
@email_alert_test
def test_question_edited_mention_stays(self):
question = self.post_question(
author = self.other_user,
body_text = 'hey @target check this one',
)
self.edit_post(
post = question,
author = self.other_user,
body_text = 'yoyo @target do look here'
)
class InstantQAnsEmailAlertTests(EmailAlertTests):
@setup_email_alert_tests
def setUp(self):
self.notification_schedule['q_ans'] = 'i'
self.setup_timestamp = datetime.datetime.now() - datetime.timedelta(1)
self.expected_results['answer_edit'] = {'message_count': 1, }
self.expected_results['q_ans_new_answer'] = {'message_count': 1, }
class DelayedAlertSubjectLineTests(TestCase):
def test_topics_in_subject_line(self):
threads = [
models.Thread(tagnames='one two three four five'),
models.Thread(tagnames='two three four five'),
models.Thread(tagnames='three four five'),
models.Thread(tagnames='four five'),
models.Thread(tagnames='five'),
]
subject = Thread.objects.get_tag_summary_from_threads(threads)
self.assertEqual('"five", "four", "three", "two" and "one"', subject)
threads += [
models.Thread(tagnames='six'),
models.Thread(tagnames='six'),
models.Thread(tagnames='six'),
models.Thread(tagnames='six'),
models.Thread(tagnames='six'),
models.Thread(tagnames='six'),
]
subject = Thread.objects.get_tag_summary_from_threads(threads)
self.assertEqual('"six", "five", "four", "three", "two" and more', subject)
class FeedbackTests(utils.AskbotTestCase):
def setUp(self):
self.create_user(username = 'user1', status='m')
self.create_user(username = 'user2', status='m')
u3 = self.create_user(username = 'user3')
u3.is_superuser = True
u3.save()
def assert_feedback_works(self):
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), 1)
#todo: change groups to django groups
#then replace to 4 back to 3 in the line below
self.assertEqual(len(outbox[0].recipients()), 3)
def test_feedback_post_form(self):
client = Client()
data = {
'email': '[email protected]',
'text': 'hi this is a test case',
'subject': 'subject line'
}
response = client.post(reverse('feedback'), data)
self.assertEquals(response.status_code, 200)
self.assertEquals(response.templates[0].name, 'feedback.html')
def test_mail_moderators(self):
"""tests askbot.mail_moderators()
"""
mail.mail_moderators('subject', 'text')
self.assert_feedback_works()
class TagFollowedInstantWholeForumEmailAlertTests(utils.AskbotTestCase):
def setUp(self):
self.user1 = self.create_user(
username = 'user1',
notification_schedule = {'q_all': 'i'},
status = 'm'
)
self.user2 = self.create_user(
username = 'user2',
status = 'm'
)
def test_wildcard_catches_new_tag(self):
"""users asks a question with a brand new tag
and other user subscribes to it by wildcard
"""
askbot_settings.update('USE_WILDCARD_TAGS', True)
self.user1.email_tag_filter_strategy = const.INCLUDE_INTERESTING
self.user1.save()
self.user1.mark_tags(
wildcards = ('some*',),
reason = 'good',
action = 'add'
)
self.user2.post_question(
title = 'some title',
body_text = 'some text for the question',
tags = 'something'
)
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), 1)
self.assertEqual(len(outbox[0].recipients()), 1)
self.assertTrue(
self.user1.email in outbox[0].recipients()
)
@with_settings(SUBSCRIBED_TAG_SELECTOR_ENABLED=False)
def test_tag_based_subscription_on_new_question_works1(self):
"""someone subscribes for an pre-existing tag
then another user asks a question with that tag
and the subcriber receives an alert
"""
models.Tag(
name = 'something',
created_by = self.user1
).save()
self.user1.email_tag_filter_strategy = const.INCLUDE_INTERESTING
self.user1.save()
self.user1.mark_tags(
tagnames = ('something',),
reason = 'good',
action = 'add'
)
self.user2.post_question(
title = 'some title',
body_text = 'some text for the question',
tags = 'something'
)
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), 1)
self.assertEqual(len(outbox[0].recipients()), 1)
self.assertTrue(
self.user1.email in outbox[0].recipients()
)
@with_settings(SUBSCRIBED_TAG_SELECTOR_ENABLED=True)
def test_tag_based_subscription_on_new_question_works2(self):
"""someone subscribes for an pre-existing tag
then another user asks a question with that tag
and the subcriber receives an alert
"""
models.Tag(
name = 'something',
created_by = self.user1
).save()
self.user1.email_tag_filter_strategy = const.INCLUDE_SUBSCRIBED
self.user1.save()
self.user1.mark_tags(
tagnames = ('something',),
reason = 'subscribed',
action = 'add'
)
self.user2.post_question(
title = 'some title',
body_text = 'some text for the question',
tags = 'something'
)
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), 1)
self.assertEqual(len(outbox[0].recipients()), 1)
self.assertTrue(
self.user1.email in outbox[0].recipients()
)
class EmailReminderTestCase(utils.AskbotTestCase):
#subclass must define these (example below)
#enable_setting_name = 'ENABLE_UNANSWERED_REMINDERS'
#frequency_setting_name = 'UNANSWERED_REMINDER_FREQUENCY'
#days_before_setting_name = 'DAYS_BEFORE_SENDING_UNANSWERED_REMINDER'
#max_reminder_setting_name = 'MAX_UNANSWERED_REMINDERS'
def setUp(self):
self.u1 = self.create_user(username = 'user1')
self.u2 = self.create_user(username = 'user2')
askbot_settings.update(self.enable_setting_name, True)
askbot_settings.update(self.max_reminder_setting_name, 5)
askbot_settings.update(self.frequency_setting_name, 1)
askbot_settings.update(self.days_before_setting_name, 2)
self.wait_days = getattr(askbot_settings, self.days_before_setting_name)
self.recurrence_days = getattr(askbot_settings, self.frequency_setting_name)
self.max_emails = getattr(askbot_settings, self.max_reminder_setting_name)
def assert_have_emails(self, email_count = None):
management.call_command(self.command_name)
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), email_count)
def do_post(self, timestamp):
self.question = self.post_question(
user = self.u1,
timestamp = timestamp
)
class AcceptAnswerReminderTests(EmailReminderTestCase):
"""only two test cases here, because the algorithm here
is the same as for unanswered questions,
except here we are dealing with the questions that have
or do not have an accepted answer
"""
enable_setting_name = 'ENABLE_ACCEPT_ANSWER_REMINDERS'
frequency_setting_name = 'ACCEPT_ANSWER_REMINDER_FREQUENCY'
days_before_setting_name = 'DAYS_BEFORE_SENDING_ACCEPT_ANSWER_REMINDER'
max_reminder_setting_name = 'MAX_ACCEPT_ANSWER_REMINDERS'
command_name = 'send_accept_answer_reminders'
def do_post(self, timestamp):
super(AcceptAnswerReminderTests, self).do_post(timestamp)
self.answer = self.post_answer(
question = self.question,
user = self.u2,
timestamp = timestamp
)
def test_reminder_positive_wait(self):
"""a positive test - user must receive a reminder
"""
days_ago = self.wait_days
timestamp = datetime.datetime.now() - datetime.timedelta(days_ago, 3600)
self.do_post(timestamp)
self.assert_have_emails(1)
def test_reminder_negative_wait(self):
"""negative test - the answer is accepted already"""
days_ago = self.wait_days
timestamp = datetime.datetime.now() - datetime.timedelta(days_ago, 3600)
self.do_post(timestamp)
self.u1.accept_best_answer(
answer = self.answer,
)
self.assert_have_emails(0)
class UnansweredReminderTests(EmailReminderTestCase):
enable_setting_name = 'ENABLE_UNANSWERED_REMINDERS'
frequency_setting_name = 'UNANSWERED_REMINDER_FREQUENCY'
days_before_setting_name = 'DAYS_BEFORE_SENDING_UNANSWERED_REMINDER'
max_reminder_setting_name = 'MAX_UNANSWERED_REMINDERS'
command_name = 'send_unanswered_question_reminders'
def test_reminder_positive_wait(self):
"""a positive test - user must receive a reminder
"""
days_ago = self.wait_days
timestamp = datetime.datetime.now() - datetime.timedelta(days_ago, 3600)
self.do_post(timestamp)
self.assert_have_emails(1)
def test_reminder_negative_wait(self):
"""a positive test - user must receive a reminder
"""
days_ago = self.wait_days - 1
timestamp = datetime.datetime.now() - datetime.timedelta(days_ago, 3600)
self.do_post(timestamp)
self.assert_have_emails(0)
def test_reminder_cutoff_positive(self):
"""send a reminder a slightly before the last reminder
date passes"""
days_ago = self.wait_days + (self.max_emails - 1)*self.recurrence_days - 1
timestamp = datetime.datetime.now() - datetime.timedelta(days_ago, 3600)
self.do_post(timestamp)
#todo: change groups to django groups
#then replace to 2 back to 1 in the line below
self.assert_have_emails(1)
def test_reminder_cutoff_negative(self):
"""no reminder after the time for the last reminder passes
"""
days_ago = self.wait_days + (self.max_emails - 1)*self.recurrence_days
timestamp = datetime.datetime.now() - datetime.timedelta(days_ago, 3600)
self.do_post(timestamp)
self.assert_have_emails(0)
class EmailFeedSettingTests(utils.AskbotTestCase):
def setUp(self):
self.user = self.create_user('user')
def get_user_feeds(self):
return models.EmailFeedSetting.objects.filter(subscriber = self.user)
def test_add_missings_subscriptions_noop(self):
data_before = TO_JSON(self.get_user_feeds())
self.user.add_missing_askbot_subscriptions()
data_after = TO_JSON(self.get_user_feeds())
self.assertEquals(data_before, data_after)
def test_add_missing_q_all_subscription(self):
feed = self.get_user_feeds().filter(feed_type = 'q_all')
feed.delete()
count_before = self.get_user_feeds().count()
self.user.add_missing_askbot_subscriptions()
count_after = self.get_user_feeds().count()
self.assertEquals(count_after - count_before, 1)
feed = self.get_user_feeds().filter(feed_type = 'q_all')[0]
self.assertEquals(
feed.frequency,
askbot_settings.DEFAULT_NOTIFICATION_DELIVERY_SCHEDULE_Q_ALL
)
def test_missing_subscriptions_added_automatically(self):
new_user = models.User.objects.create_user('new', '[email protected]')
feeds_before = self.get_user_feeds()
#verify that feed settings are created automatically
#when user is just created
self.assertTrue(feeds_before.count() != 0)
data_before = TO_JSON(feeds_before)
new_user.add_missing_askbot_subscriptions()
data_after = TO_JSON(self.get_user_feeds())
self.assertEquals(data_before, data_after)
class EmailAlertTestsWithGroupsEnabled(utils.AskbotTestCase):
def setUp(self):
self.backup = askbot_settings.GROUPS_ENABLED
askbot_settings.update('GROUPS_ENABLED', True)
def tearDown(self):
askbot_settings.update('GROUPS_ENABLED', self.backup)
@with_settings(MIN_REP_TO_TRIGGER_EMAIL=1)
def test_notification_for_global_group_works(self):
sender = self.create_user('sender')
recipient = self.create_user(
'recipient',
notification_schedule=models.EmailFeedSetting.MAX_EMAIL_SCHEDULE
)
self.post_question(user=sender)
outbox = django.core.mail.outbox
self.assertEqual(len(outbox), 1)
self.assertEqual(outbox[0].recipients(), [recipient.email])
class PostApprovalTests(utils.AskbotTestCase):
"""test notifications sent to authors when their posts
are approved or published"""
def setUp(self):
self.reply_by_email = askbot_settings.REPLY_BY_EMAIL
askbot_settings.update('REPLY_BY_EMAIL', True)
self.content_moderation_mode = \
askbot_settings.CONTENT_MODERATION_MODE
askbot_settings.update('CONTENT_MODERATION_MODE', 'premoderation')
self.self_notify_when = \
askbot_settings.SELF_NOTIFY_EMAILED_POST_AUTHOR_WHEN
when = const.FOR_FIRST_REVISION
askbot_settings.update('SELF_NOTIFY_EMAILED_POST_AUTHOR_WHEN', when)
assert(
django_settings.EMAIL_BACKEND == 'django.core.mail.backends.locmem.EmailBackend'
)
def tearDown(self):
askbot_settings.update(
'REPLY_BY_EMAIL', self.reply_by_email
)
askbot_settings.update(
'CONTENT_MODERATION_MODE',
self.content_moderation_mode
)
askbot_settings.update(
'SELF_NOTIFY_EMAILED_POST_AUTHOR_WHEN',
self.self_notify_when
)
def test_emailed_question_answerable_approval_notification(self):
self.u1 = self.create_user('user1', status='a')#watched user
question = self.post_question(user=self.u1, by_email=True)
outbox = django.core.mail.outbox
#here we should get just the notification of the post
#being placed on the moderation queue
self.assertEquals(len(outbox), 1)
self.assertEquals(outbox[0].recipients(), [self.u1.email])
def test_moderated_question_answerable_approval_notification(self):
u1 = self.create_user('user1', status = 'w')
question = self.post_question(user = u1, by_email = True)
self.assertEquals(question.approved, False)
u2 = self.create_user('admin', status = 'd')
self.assertEquals(question.revisions.count(), 1)
u2.approve_post_revision(question.get_latest_revision())
outbox = django.core.mail.outbox
self.assertEquals(len(outbox), 2)
#moderation notification
self.assertEquals(outbox[0].recipients(), [u1.email,])
#self.assertEquals(outbox[1].recipients(), [u1.email,])#approval
class AbsolutizeUrlsInEmailsTests(utils.AskbotTestCase):
@with_settings(
MIN_REP_TO_TRIGGER_EMAIL=1,
APP_URL='http://example.com/',
MIN_REP_TO_INSERT_LINK=1
)
def test_urls_are_absolute(self):
u1 = self.create_user('u1')
max_email = models.EmailFeedSetting.MAX_EMAIL_SCHEDULE
u2 = self.create_user('u2', notification_schedule=max_email)
text = '<a href="/index.html">home</a>' + \
'<img alt="an image" src=\'/img.png\'><a href="https://example.com"><img src="/img.png"/></a>'
question = self.post_question(user=u1, body_text=text)
outbox = django.core.mail.outbox
html_message = outbox[0].alternatives[0][0]
content_type = outbox[0].alternatives[0][1]
self.assertEqual(content_type, 'text/html')
soup = bs4.BeautifulSoup(html_message, 'html.parser')  # explicit parser avoids bs4's "no parser specified" warning
links = soup.find_all('a')
url_bits = {}
for link in links:
if link.attrs['href'].startswith('mailto:'):
continue
url_bits[link.attrs['href'][:4]] = 1
self.assertEqual(len(url_bits.keys()), 1)
self.assertEqual(url_bits.keys()[0], 'http')
images = soup.find_all('img')
url_bits = {}
for img in images:
url_bits[img.attrs['src'][:4]] = 1
self.assertEqual(len(url_bits.keys()), 1)
self.assertEqual(url_bits.keys()[0], 'http')
class MailMessagesTests(utils.AskbotTestCase):
def test_ask_for_signature(self):
from askbot.mail.messages import AskForSignature
user = self.create_user('user')
message = AskForSignature({
'user': user,
'footer_code': 'nothing'
}).render_body()
self.assertTrue(user.username in message)
| divio/askbot-devel | askbot/tests/email_alert_tests.py | Python | gpl-3.0 | 46,147 |
import time
import math
start = time.time()
primes = [2,3]
sum_primes = [0,2,5]
def is_prime(num):
for i in primes:
if i > math.sqrt(num):
break
if num % i == 0:
return False
return True
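# enumerate candidates of the form 6k - 1 and 6k + 1 (every prime > 3 has this form)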
first = 6-1
second = 6+1
target = 1000000
while first < target and second < target:
if is_prime(first):
primes.append(first)
sum_primes.append(first + sum_primes[-1])
if is_prime(second):
primes.append(second)
sum_primes.append(second + sum_primes[-1])
first += 6
second += 6
total_prime = len(primes)
print(total_prime)
length = 0
answer = -1
pair = 0,0
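# sum_primes is a prefix-sum list (sum_primes[i] == sum of the first i primes),
# so the sum of the consecutive run primes[begin:end] is sum_primes[end] - sum_primes[begin];
# the nested loops search for the longest such run whose sum is itself prime.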
for begin in range(0,total_prime-length):
for end in range(total_prime-1, -1, -1):
temp_len = end - begin
if temp_len <= length:
break
diff = sum_primes[end] - sum_primes[begin]
if diff in primes and temp_len > length:
length = temp_len
answer = diff
pair = begin,end
print(answer)
print(length)
print(pair)
print(time.time() - start)
| fresky/ProjectEulerSolution | 050.py | Python | mit | 1,063 |
# Copyright (c) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import netaddr
import webob.exc
from quantum.api.v2 import attributes
from quantum.api.v2 import resource as wsgi_resource
from quantum.common import exceptions
from quantum.openstack.common import cfg
from quantum.openstack.common import log as logging
from quantum.openstack.common.notifier import api as notifier_api
from quantum import policy
from quantum import quota
LOG = logging.getLogger(__name__)
XML_NS_V20 = 'http://openstack.org/quantum/api/v2.0'
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
exceptions.InUse: webob.exc.HTTPConflict,
exceptions.Conflict: webob.exc.HTTPConflict,
exceptions.BadRequest: webob.exc.HTTPBadRequest,
exceptions.ResourceExhausted: webob.exc.HTTPServiceUnavailable,
exceptions.MacAddressGenerationFailure:
webob.exc.HTTPServiceUnavailable,
exceptions.StateInvalid: webob.exc.HTTPBadRequest,
exceptions.InvalidInput: webob.exc.HTTPBadRequest,
exceptions.OverlappingAllocationPools: webob.exc.HTTPConflict,
exceptions.OutOfBoundsAllocationPool: webob.exc.HTTPBadRequest,
exceptions.InvalidAllocationPool: webob.exc.HTTPBadRequest,
exceptions.InvalidSharedSetting: webob.exc.HTTPConflict,
exceptions.HostRoutesExhausted: webob.exc.HTTPBadRequest,
exceptions.DNSNameServersExhausted: webob.exc.HTTPBadRequest,
# Some plugins enforce policies as well
exceptions.PolicyNotAuthorized: webob.exc.HTTPForbidden,
netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
AttributeError: webob.exc.HTTPBadRequest,
ValueError: webob.exc.HTTPBadRequest,
exceptions.IpAddressGenerationFailure: webob.exc.HTTPConflict,
}
QUOTAS = quota.QUOTAS
def _fields(request):
"""
Extracts the list of fields to return
"""
return [v for v in request.GET.getall('fields') if v]
def _filters(request, attr_info):
"""
Extracts the filters from the request string
Returns a dict of lists for the filters:
check=a&check=b&name=Bob&
becomes
{'check': [u'a', u'b'], 'name': [u'Bob']}
"""
res = {}
for key in set(request.GET):
if key == 'fields':
continue
values = [v for v in request.GET.getall(key) if v]
key_attr_info = attr_info.get(key, {})
if not key_attr_info and values:
res[key] = values
continue
convert_list_to = key_attr_info.get('convert_list_to')
if not convert_list_to:
convert_to = key_attr_info.get('convert_to')
if convert_to:
convert_list_to = lambda values_: [convert_to(x)
for x in values_]
if convert_list_to:
try:
result_values = convert_list_to(values)
except exceptions.InvalidInput as e:
raise webob.exc.HTTPBadRequest(str(e))
else:
result_values = values
if result_values:
res[key] = result_values
return res
class Controller(object):
def __init__(self, plugin, collection, resource, attr_info,
allow_bulk=False, member_actions=None):
if member_actions is None:
member_actions = []
self._plugin = plugin
self._collection = collection.replace('-', '_')
self._resource = resource
self._attr_info = attr_info
self._allow_bulk = allow_bulk
self._native_bulk = self._is_native_bulk_supported()
self._policy_attrs = [name for (name, info) in self._attr_info.items()
if info.get('required_by_policy')]
self._publisher_id = notifier_api.publisher_id('network')
self._member_actions = member_actions
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_visible(self, attr):
attr_val = self._attr_info.get(attr)
return attr_val and attr_val['is_visible']
def _view(self, data, fields_to_strip=None):
# make sure fields_to_strip is iterable
if not fields_to_strip:
fields_to_strip = []
return dict(item for item in data.iteritems()
if self._is_visible(item[0])
and not item[0] in fields_to_strip)
def _do_field_list(self, original_fields):
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
def __getattr__(self, name):
if name in self._member_actions:
def _handle_action(request, id, body=None):
return getattr(self._plugin, name)(request.context, id, body)
return _handle_action
else:
raise AttributeError
def _items(self, request, do_authz=False):
"""Retrieves and formats a list of elements of the requested entity"""
# NOTE(salvatore-orlando): The following ensures that fields which
# are needed for authZ policy validation are not stripped away by the
# plugin before returning.
original_fields, fields_to_add = self._do_field_list(_fields(request))
kwargs = {'filters': _filters(request, self._attr_info),
'fields': original_fields}
obj_getter = getattr(self._plugin, "get_%s" % self._collection)
obj_list = obj_getter(request.context, **kwargs)
# Check authz
if do_authz:
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
# Omit items from list that should not be visible
obj_list = [obj for obj in obj_list
if policy.check(request.context,
"get_%s" % self._resource,
obj,
plugin=self._plugin)]
return {self._collection: [self._view(obj,
fields_to_strip=fields_to_add)
for obj in obj_list]}
def _item(self, request, id, do_authz=False, field_list=None):
"""Retrieves and formats a single element of the requested entity"""
kwargs = {'fields': field_list}
action = "get_%s" % self._resource
obj_getter = getattr(self._plugin, action)
obj = obj_getter(request.context, id, **kwargs)
# Check authz
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
if do_authz:
policy.enforce(request.context, action, obj, plugin=self._plugin)
return obj
def index(self, request):
"""Returns a list of the requested entity"""
return self._items(request, True)
def show(self, request, id):
"""Returns detailed information about the requested entity"""
try:
# NOTE(salvatore-orlando): The following ensures that fields
# which are needed for authZ policy validation are not stripped
# away by the plugin before returning.
field_list, added_fields = self._do_field_list(_fields(request))
return {self._resource:
self._view(self._item(request,
id,
do_authz=True,
field_list=field_list),
fields_to_strip=added_fields)}
except exceptions.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
raise webob.exc.HTTPNotFound()
def _emulate_bulk_create(self, obj_creator, request, body):
objs = []
try:
for item in body[self._collection]:
kwargs = {self._resource: item}
objs.append(self._view(obj_creator(request.context,
**kwargs)))
return objs
# Note(salvatore-orlando): broad catch as in theory a plugin
# could raise any kind of exception
except Exception as ex:
for obj in objs:
delete_action = "delete_%s" % self._resource
obj_deleter = getattr(self._plugin, delete_action)
try:
obj_deleter(request.context, obj['id'])
except Exception:
# broad catch as our only purpose is to log the exception
LOG.exception(_("Unable to undo add for "
"%(resource)s %(id)s"),
{'resource': self._resource,
'id': obj['id']})
# TODO(salvatore-orlando): The object being processed when the
# plugin raised might have been created or not in the db.
# We need a way for ensuring that if it has been created,
# it is then deleted
raise
def create(self, request, body=None):
"""Creates a new instance of the requested entity"""
notifier_api.notify(request.context,
self._publisher_id,
self._resource + '.create.start',
notifier_api.INFO,
body)
body = Controller.prepare_request_body(request.context, body, True,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = "create_%s" % self._resource
# Check authz
try:
if self._collection in body:
# Have to account for bulk create
for item in body[self._collection]:
self._validate_network_tenant_ownership(
request,
item[self._resource],
)
policy.enforce(request.context,
action,
item[self._resource],
plugin=self._plugin)
try:
count = QUOTAS.count(request.context, self._resource,
self._plugin, self._collection,
item[self._resource]['tenant_id'])
kwargs = {self._resource: count + 1}
except exceptions.QuotaResourceUnknown as e:
# We don't want to quota this resource
LOG.debug(e)
except Exception:
raise
else:
QUOTAS.limit_check(request.context,
item[self._resource]['tenant_id'],
**kwargs)
else:
self._validate_network_tenant_ownership(
request,
body[self._resource]
)
policy.enforce(request.context,
action,
body[self._resource],
plugin=self._plugin)
try:
count = QUOTAS.count(request.context, self._resource,
self._plugin, self._collection,
body[self._resource]['tenant_id'])
kwargs = {self._resource: count + 1}
except exceptions.QuotaResourceUnknown as e:
# We don't want to quota this resource
LOG.debug(e)
except Exception:
raise
else:
QUOTAS.limit_check(request.context,
body[self._resource]['tenant_id'],
**kwargs)
except exceptions.PolicyNotAuthorized:
LOG.exception(_("Create operation not authorized"))
raise webob.exc.HTTPForbidden()
def notify(create_result):
notifier_api.notify(request.context,
self._publisher_id,
self._resource + '.create.end',
notifier_api.INFO,
create_result)
return create_result
if self._collection in body and self._native_bulk:
# plugin does atomic bulk create operations
obj_creator = getattr(self._plugin, "%s_bulk" % action)
objs = obj_creator(request.context, body)
return notify({self._collection: [self._view(obj)
for obj in objs]})
else:
obj_creator = getattr(self._plugin, action)
if self._collection in body:
# Emulate atomic bulk behavior
objs = self._emulate_bulk_create(obj_creator, request, body)
return notify({self._collection: objs})
else:
kwargs = {self._resource: body}
obj = obj_creator(request.context, **kwargs)
return notify({self._resource: self._view(obj)})
def delete(self, request, id):
"""Deletes the specified entity"""
notifier_api.notify(request.context,
self._publisher_id,
self._resource + '.delete.start',
notifier_api.INFO,
{self._resource + '_id': id})
action = "delete_%s" % self._resource
# Check authz
obj = self._item(request, id)
try:
policy.enforce(request.context,
action,
obj,
plugin=self._plugin)
except exceptions.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
raise webob.exc.HTTPNotFound()
obj_deleter = getattr(self._plugin, action)
obj_deleter(request.context, id)
notifier_api.notify(request.context,
self._publisher_id,
self._resource + '.delete.end',
notifier_api.INFO,
{self._resource + '_id': id})
def update(self, request, id, body=None):
"""Updates the specified entity's attributes"""
payload = body.copy()
payload['id'] = id
notifier_api.notify(request.context,
self._publisher_id,
self._resource + '.update.start',
notifier_api.INFO,
payload)
body = Controller.prepare_request_body(request.context, body, False,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = "update_%s" % self._resource
# Load object to check authz
# but pass only attributes in the original body and required
# by the policy engine to the policy 'brain'
        field_list = [name for (name, value) in self._attr_info.iteritems()
                      if ('required_by_policy' in value and
                          value['required_by_policy'] or
                          'default' not in value)]
orig_obj = self._item(request, id, field_list=field_list)
orig_obj.update(body[self._resource])
try:
policy.enforce(request.context,
action,
orig_obj,
plugin=self._plugin)
except exceptions.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
raise webob.exc.HTTPNotFound()
obj_updater = getattr(self._plugin, action)
kwargs = {self._resource: body}
obj = obj_updater(request.context, id, **kwargs)
result = {self._resource: self._view(obj)}
notifier_api.notify(request.context,
self._publisher_id,
self._resource + '.update.end',
notifier_api.INFO,
result)
return result
@staticmethod
def _populate_tenant_id(context, res_dict, is_create):
if (('tenant_id' in res_dict and
res_dict['tenant_id'] != context.tenant_id and
not context.is_admin)):
msg = _("Specifying 'tenant_id' other than authenticated "
"tenant in request requires admin privileges")
raise webob.exc.HTTPBadRequest(msg)
if is_create and 'tenant_id' not in res_dict:
if context.tenant_id:
res_dict['tenant_id'] = context.tenant_id
else:
msg = _("Running without keystyone AuthN requires "
" that tenant_id is specified")
raise webob.exc.HTTPBadRequest(msg)
@staticmethod
def prepare_request_body(context, body, is_create, resource, attr_info,
allow_bulk=False):
""" verifies required attributes are in request body, and that
an attribute is only specified if it is allowed for the given
operation (create/update).
Attribute with default values are considered to be
optional.
body argument must be the deserialized body
"""
collection = resource + "s"
if not body:
raise webob.exc.HTTPBadRequest(_("Resource body required"))
body = body or {resource: {}}
if collection in body and allow_bulk:
bulk_body = [Controller.prepare_request_body(
context, {resource: b}, is_create, resource, attr_info,
allow_bulk) if resource not in b
else Controller.prepare_request_body(
context, b, is_create, resource, attr_info, allow_bulk)
for b in body[collection]]
if not bulk_body:
raise webob.exc.HTTPBadRequest(_("Resources required"))
return {collection: bulk_body}
elif collection in body and not allow_bulk:
raise webob.exc.HTTPBadRequest("Bulk operation not supported")
res_dict = body.get(resource)
if res_dict is None:
msg = _("Unable to find '%s' in request body") % resource
raise webob.exc.HTTPBadRequest(msg)
Controller._populate_tenant_id(context, res_dict, is_create)
if is_create: # POST
for attr, attr_vals in attr_info.iteritems():
is_required = ('default' not in attr_vals and
attr_vals['allow_post'])
if is_required and attr not in res_dict:
msg = _("Failed to parse request. Required "
" attribute '%s' not specified") % attr
raise webob.exc.HTTPBadRequest(msg)
if not attr_vals['allow_post'] and attr in res_dict:
msg = _("Attribute '%s' not allowed in POST") % attr
raise webob.exc.HTTPBadRequest(msg)
if attr_vals['allow_post']:
res_dict[attr] = res_dict.get(attr,
attr_vals.get('default'))
else: # PUT
for attr, attr_vals in attr_info.iteritems():
if attr in res_dict and not attr_vals['allow_put']:
msg = _("Cannot update read-only attribute %s") % attr
raise webob.exc.HTTPBadRequest(msg)
for attr, attr_vals in attr_info.iteritems():
# Convert values if necessary
if ('convert_to' in attr_vals and
attr in res_dict and
res_dict[attr] != attributes.ATTR_NOT_SPECIFIED):
res_dict[attr] = attr_vals['convert_to'](res_dict[attr])
# Check that configured values are correct
if not ('validate' in attr_vals and
attr in res_dict and
res_dict[attr] != attributes.ATTR_NOT_SPECIFIED):
continue
for rule in attr_vals['validate']:
res = attributes.validators[rule](res_dict[attr],
attr_vals['validate'][rule])
if res:
msg_dict = dict(attr=attr, reason=res)
msg = _("Invalid input for %(attr)s. "
"Reason: %(reason)s.") % msg_dict
raise webob.exc.HTTPBadRequest(msg)
return body
def _validate_network_tenant_ownership(self, request, resource_item):
# TODO(salvatore-orlando): consider whether this check can be folded
# in the policy engine
if self._resource not in ('port', 'subnet'):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
msg = _("Tenant %(tenant_id)s not allowed to "
"create %(resource)s on this network")
raise webob.exc.HTTPForbidden(msg % {
"tenant_id": resource_item['tenant_id'],
"resource": self._resource,
})
def create_resource(collection, resource, plugin, params, allow_bulk=False,
member_actions=None):
controller = Controller(plugin, collection, resource, params, allow_bulk,
member_actions=member_actions)
# NOTE(jkoelker) To anyone wishing to add "proper" xml support
# this is where you do it
serializers = {}
# 'application/xml': wsgi.XMLDictSerializer(metadata, XML_NS_V20),
deserializers = {}
# 'application/xml': wsgi.XMLDeserializer(metadata),
return wsgi_resource.Resource(controller, FAULT_MAP, deserializers,
serializers)
| aristanetworks/arista-ovs-quantum | quantum/api/v2/base.py | Python | apache-2.0 | 23,673 |
# Wrapper for pomegranate.distributions.NormalDistribution
import sys
import numpy as np
from pomegranate.distributions import NormalDistribution as ND
import chippr
class gauss(object):
def __init__(self, mean, var, bounds=None):
"""
A univariate Gaussian probability distribution object
Parameters
----------
mean: float
mean of Gaussian probability distribution
var: float
            variance of Gaussian probability distribution
        bounds: tuple of float or None, optional
            not used by this wrapper
        """
self.mean = mean
self.var = var
self.sigma = self.norm_var()
self.invvar = self.invert_var()
self.dist = ND(self.mean, self.sigma)
def norm_var(self):
"""
Function to create standard deviation from variance
"""
return np.sqrt(self.var)
def invert_var(self):
"""
Function to invert variance
"""
return 1./self.var
    def pdf(self, xs):
        """Alias for evaluate()."""
        return self.evaluate(xs)
def evaluate_one(self, x):
"""
Function to evaluate Gaussian probability distribution once
Parameters
----------
x: float
value at which to evaluate Gaussian probability distribution
Returns
-------
p: float
probability associated with x
"""
# p = 1. / (np.sqrt(2. * np.pi) * self.sigma) * \
# np.exp(-0.5 * (self.mean - x) * self.invvar * (self.mean - x))
p = self.dist.probability(x)
return p
def evaluate(self, xs):
"""
Function to evaluate univariate Gaussian probability distribution at multiple points
Parameters
----------
xs: numpy.ndarray, float
input values at which to evaluate probability
Returns
-------
ps: ndarray, float
output probabilities
"""
# ps = 1. / (np.sqrt(2. * np.pi) * self.sigma) * \
# np.exp(-0.5 * (self.mean - xs) * self.invvar * (self.mean - xs))
# ps = np.zeros_like(xs)
# for n, x in enumerate(xs):
# ps[n] += self.evaluate_one(x)
ps = self.dist.probability(xs)
return ps
def sample_one(self):
"""
Function to take one sample from univariate Gaussian probability distribution
Returns
-------
x: float
single sample from Gaussian probability distribution
"""
# x = self.mean + self.sigma * np.random.normal()
x = self.dist.sample(1)
return x
def sample(self, n_samps):
"""
Function to sample univariate Gaussian probability distribution
Parameters
----------
n_samps: positive int
number of samples to take
Returns
-------
xs: ndarray, float
array of n_samps samples from Gaussian probability distribution
"""
# print('gauss trying to sample '+str(n_samps)+' from '+str(self.dist))
# xs = np.array([self.sample_one() for n in range(n_samps)])
xs = np.array(self.dist.sample(n_samps))
# print('gauss sampled '+str(n_samps)+' from '+str(self.dist))
return xs
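
# Editor's usage sketch (not part of the original module); values illustrative.
if __name__ == '__main__':
    g = gauss(0., 1.)
    print(g.evaluate_one(0.))                   # ~0.3989, peak of the standard normal
    print(g.evaluate(np.array([-1., 0., 1.])))  # vectorised evaluation
    print(g.sample(5))                          # five draws from N(0, 1)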
| aimalz/chippr | chippr/gauss.py | Python | mit | 3,234 |
#
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
import unittest
from dragonfly.windows.window import Window
#===========================================================================
class TestWindow(unittest.TestCase):
def setUp(self):
pass
def test_set_handle(self):
""" Test access to Window.handle property. """
# Verify that only integers and longs are accepted.
Window(0)
Window(int(1))
        Window(long(2))  # Python 2 only; ``long`` does not exist on Python 3
self.assertRaises(TypeError, Window, [None])
self.assertRaises(TypeError, Window, ["string"])
self.assertRaises(TypeError, Window, [3.4])
#===========================================================================
if __name__ == "__main__":
unittest.main()
| summermk/dragonfly | dragonfly/test/test_window.py | Python | lgpl-3.0 | 1,553 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from knowledge import settings
from knowledge.models import Question, Response
OPTIONAL_FIELDS = ['alert', 'phone_number']
__todo__ = """
This is serious badness. Really? Functions masquerading as
classes? Lame. This should be fixed. Sorry.
~bryan
"""
def QuestionForm(user, *args, **kwargs):
"""
Build and return the appropriate form depending
on the status of the passed in user.
"""
if user.is_anonymous():
if not settings.ALLOW_ANONYMOUS:
return None
else:
selected_fields = ['name', 'email', 'title', 'body']
else:
selected_fields = ['user', 'title', 'body', 'status']
# if settings.ALERTS:
# selected_fields += ['alert']
class _QuestionForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(_QuestionForm, self).__init__(*args, **kwargs)
for key in self.fields:
                if key not in OPTIONAL_FIELDS:
self.fields[key].required = True
# hide the internal status for non-staff
qf = self.fields.get('status', None)
if qf and not user.is_staff:
choices = list(qf.choices)
choices.remove(('internal', _('Internal')))
qf.choices = choices
# a bit of a hack...
# hide a field, and use clean to force
# a specific value of ours
for key in ['user']:
qf = self.fields.get(key, None)
if qf:
qf.widget = qf.hidden_widget()
qf.required = False
# honey pot!
phone_number = forms.CharField(required=False)
def clean_user(self):
return user
class Meta:
model = Question
fields = selected_fields
return _QuestionForm(*args, **kwargs)
def ResponseForm(user, question, *args, **kwargs):
"""
Build and return the appropriate form depending
on the status of the passed in user and question.
"""
if question.locked:
return None
if not settings.FREE_RESPONSE and not \
(user.is_staff or question.user == user):
return None
if user.is_anonymous():
if not settings.ALLOW_ANONYMOUS:
return None
else:
selected_fields = ['name', 'email']
else:
selected_fields = ['user']
selected_fields += ['body', 'question']
if user.is_staff:
selected_fields += ['status']
if settings.ALERTS:
selected_fields += ['alert']
class _ResponseForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(_ResponseForm, self).__init__(*args, **kwargs)
for key in self.fields:
                if key not in OPTIONAL_FIELDS:
self.fields[key].required = True
# a bit of a hack...
for key in ['user', 'question']:
qf = self.fields.get(key, None)
if qf:
qf.widget = qf.hidden_widget()
qf.required = False
# honey pot!
phone_number = forms.CharField(required=False)
def clean_user(self):
return user
def clean_question(self):
return question
class Meta:
model = Response
fields = selected_fields
return _ResponseForm(*args, **kwargs)
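
# Editor's note: typical call sites (hypothetical), given a Django request:
#
#     form = QuestionForm(request.user, request.POST or None)
#     form = ResponseForm(request.user, question, request.POST or None)
#
# Both factories return None when the user (or the question's state) is not
# allowed to post, so callers must check for None before using the form.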
| RDXT/django-knowledge | knowledge/forms.py | Python | isc | 3,574 |
import pandas as pd
import quandl
import math
import numpy as np
from sklearn import preprocessing, cross_validation, svm
from sklearn.linear_model import LinearRegression
import datetime
import matplotlib.pyplot as plt
from matplotlib import style
import pickle
style.use('ggplot')
quandl.ApiConfig.api_key = 'R7Wd_FwtDaxiyVEDg9dw'
df = quandl.get('WIKI/GOOGL',rows=1000)
df = df[['Adj. Open', 'Adj. High', 'Adj. Low', 'Adj. Close', 'Adj. Volume']]
df['HL_PCT'] = (df['Adj. High'] - df['Adj. Low']) / df['Adj. Close'] * 100.0
df['PCT_change'] = (df['Adj. Close'] - df['Adj. Open']) / df['Adj. Open'] * 100.0
df = df[['Adj. Close', 'HL_PCT', 'PCT_change', 'Adj. Volume']]
forecast_col = 'Adj. Close'
df.fillna(-99999,inplace=True)
# predict ~10% of the dataset's length into the future
forecast_out = int(math.ceil(0.1*len(df)))
# the label is the adjusted close shifted forecast_out rows into the future
df['label'] = df[forecast_col].shift(-forecast_out)
X = np.array(df.drop(['label'],1))
X = preprocessing.scale(X)
X_lately = X[-forecast_out:]
X = X[:-forecast_out]
df.dropna(inplace=True)
y = np.array(df['label'])
X_train, X_test, y_train, y_test = cross_validation.train_test_split(X,y,test_size=0.2)
df['Forecast'] = np.nan
# clf = LinearRegression(n_jobs=-1)
# clf.fit(X_train,y_train)
#
# with open('linearregression.pickle','wb') as f:
# pickle.dump(clf,f)
with open('linearregression.pickle', 'rb') as pickle_in:
    clf = pickle.load(pickle_in)
confidence = clf.score(X_test,y_test)
print(confidence)
forecast_set = clf.predict(X_lately)
last_date = df.iloc[-1].name
last_unix = last_date.timestamp()
one_day = 86400
next_unix = last_unix + one_day
for i in forecast_set:
    next_date = datetime.datetime.fromtimestamp(next_unix)
    next_unix += one_day
    # append a row that is NaN in every column except the forecast
    df.loc[next_date] = [np.nan for _ in range(len(df.columns)-1)] + [i]
print(df['Forecast'].tail())
df['Adj. Close'].plot()
df['Forecast'].plot()
plt.legend(loc=4)
plt.xlabel('Date')
plt.ylabel('Price')
plt.show()
| rohitgadia/MachineLearningPractice | Regression/ex1.py | Python | gpl-3.0 | 1,872 |
'''
malis2profiles.py - build profiles from malis
=============================================
:Author: Andreas Heger
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
convert a set of plain alignments to profiles.
Usage
-----
Example::
python malis2profiles.py --help
Type::
python malis2profiles.py --help
for command line help.
Command line options
--------------------
'''
import sys
import CGAT.Experiment as E
import CGAT.Mali as Mali
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if argv is None:
argv = sys.argv
parser = E.OptionParser(
version="%prog version: $Id$",
usage=globals()["__doc__"])
parser.set_defaults(
)
(options, args) = E.Start(parser)
mali = Mali.SequenceCollection()
last_id = None
ninput, noutput, nskipped = 0, 0, 0
for line in sys.stdin:
if line[0] == "#":
continue
start, ali, end, id = line[:-1].split("\t")
ninput += 1
if id != last_id:
if last_id:
mali.setName(last_id)
mali.writeToFile(sys.stdout, format="profile")
noutput += 1
mali = Mali.SequenceCollection()
last_id = id
mali.addSequence(id, start, end, ali)
if last_id:
mali.setName(last_id)
mali.writeToFile(sys.stdout, format="profile")
noutput += 1
if options.loglevel >= 1:
options.stdlog.write(
"# ninput=%i, noutput=%i, nskipped=%i.\n" % (ninput, noutput, nskipped))
E.Stop()
if __name__ == "__main__":
sys.exit(main(sys.argv))
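
# Editor's note: stdin is tab-separated with four columns per line --
# start, alignment, end, id -- and consecutive lines sharing an id are
# collapsed into one profile. A hypothetical invocation:
#
#     printf '0\tACGT\t4\tseq1\n0\tAC-T\t3\tseq1\n' | python malis2profiles.py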
| CGATOxford/Optic | scripts/malis2profiles.py | Python | mit | 1,696 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
PACKAGE_NAME = 'mozdevice'
PACKAGE_VERSION = '0.33'
deps = ['mozfile >= 1.0',
'mozlog',
'moznetwork >= 0.24'
]
setup(name=PACKAGE_NAME,
version=PACKAGE_VERSION,
description="Mozilla-authored device management",
long_description="see http://mozbase.readthedocs.org/",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Mozilla Automation and Testing Team',
author_email='[email protected]',
url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
license='MPL',
packages=['mozdevice'],
include_package_data=True,
zip_safe=False,
install_requires=deps,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
dm = mozdevice.dmcli:cli
sutini = mozdevice.sutini:main
""",
)
| michath/ConMonkey | testing/mozbase/mozdevice/setup.py | Python | mpl-2.0 | 1,118 |
import os
from common.serializers.json_serializer import JsonSerializer
from ledger.compact_merkle_tree import CompactMerkleTree
from ledger.genesis_txn.genesis_txn_file_util import genesis_txn_file
from ledger.genesis_txn.genesis_txn_initiator import GenesisTxnInitiator
from ledger.ledger import Ledger
from storage import store_utils
from storage.text_file_store import TextFileStore
from stp_core.common.log import getlogger
logger = getlogger()
class GenesisTxnInitiatorFromFile(GenesisTxnInitiator):
"""
Creates genesis txn as a text file.
Can init the ledger from a text file-based genesis txn file.
"""
def __init__(self, data_dir, txn_file, serializer=JsonSerializer()):
self.__data_dir = data_dir
self.__db_name = genesis_txn_file(txn_file)
self.__serializer = serializer
def init_ledger_from_genesis_txn(self, ledger: Ledger):
# TODO: it's possible that the file to be used for initialization does not exist.
# This is not considered as an error as of now.
init_file = os.path.join(self.__data_dir, self.__db_name)
if not os.path.exists(init_file):
logger.display("File that should be used for initialization of "
"Ledger does not exist: {}".format(init_file))
return
with open(init_file, 'r') as f:
for line in store_utils.cleanLines(f):
txn = self.__serializer.deserialize(line)
ledger.add(txn)
def create_initiator_ledger(self) -> Ledger:
store = TextFileStore(self.__data_dir,
self.__db_name,
isLineNoKey=True,
storeContentHash=False,
ensureDurability=False)
return Ledger(CompactMerkleTree(),
dataDir=self.__data_dir,
txn_serializer=self.__serializer,
fileName=self.__db_name,
transactionLogStore=store)
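

# Editor's usage sketch (not part of the original module): the directory and
# file name are hypothetical. If the genesis file does not exist,
# init_ledger_from_genesis_txn() just logs a message and returns.
if __name__ == '__main__':
    initiator = GenesisTxnInitiatorFromFile('/tmp/genesis_demo',
                                            'pool_transactions')
    ledger = initiator.create_initiator_ledger()
    initiator.init_ledger_from_genesis_txn(ledger)
    print(ledger.size)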
| evernym/plenum | ledger/genesis_txn/genesis_txn_initiator_from_file.py | Python | apache-2.0 | 2,039 |
#
# The Qubes OS Project, https://www.qubes-os.org/
#
# Copyright (C) 2014-2015 Joanna Rutkowska <[email protected]>
# Copyright (C) 2014-2015 Wojtek Porczyk <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <https://www.gnu.org/licenses/>.
#
import subprocess
import unittest
import unittest.mock
import functools
import asyncio
import qubes
import qubes.exc
import qubes.vm
import qubes.vm.adminvm
import qubes.tests
class TC_00_AdminVM(qubes.tests.QubesTestCase):
def setUp(self):
super().setUp()
try:
self.app = qubes.tests.vm.TestApp()
with unittest.mock.patch.object(
qubes.vm.adminvm.AdminVM, 'start_qdb_watch') as mock_qdb:
self.vm = qubes.vm.adminvm.AdminVM(self.app,
xml=None)
mock_qdb.assert_called_once_with()
self.addCleanup(self.cleanup_adminvm)
except: # pylint: disable=bare-except
if self.id().endswith('.test_000_init'):
raise
self.skipTest('setup failed')
def tearDown(self) -> None:
self.app.domains.clear()
def add_vm(self, name, cls=qubes.vm.qubesvm.QubesVM, **kwargs):
vm = cls(self.app, None,
qid=kwargs.pop('qid', 1), name=qubes.tests.VMPREFIX + name,
**kwargs)
self.app.domains[vm.qid] = vm
self.app.domains[vm.uuid] = vm
self.app.domains[vm.name] = vm
self.app.domains[vm] = vm
self.addCleanup(vm.close)
return vm
async def coroutine_mock(self, mock, *args, **kwargs):
return mock(*args, **kwargs)
def cleanup_adminvm(self):
self.vm.close()
del self.vm
def test_000_init(self):
pass
def test_001_property_icon(self):
self.assertEqual(self.vm.icon, 'adminvm-black')
def test_100_xid(self):
self.assertEqual(self.vm.xid, 0)
def test_101_libvirt_domain(self):
with unittest.mock.patch.object(self.app, 'vmm') as mock_vmm:
self.assertIsNotNone(self.vm.libvirt_domain)
self.assertEqual(mock_vmm.mock_calls, [
('libvirt_conn.lookupByID', (0,), {}),
])
def test_300_is_running(self):
self.assertTrue(self.vm.is_running())
def test_301_get_power_state(self):
self.assertEqual(self.vm.get_power_state(), 'Running')
def test_302_get_mem(self):
self.assertGreater(self.vm.get_mem(), 0)
@unittest.skip('mock object does not support this')
def test_303_get_mem_static_max(self):
self.assertGreater(self.vm.get_mem_static_max(), 0)
def test_310_start(self):
with self.assertRaises(qubes.exc.QubesException):
self.vm.start()
@unittest.skip('this functionality is undecided')
def test_311_suspend(self):
with self.assertRaises(qubes.exc.QubesException):
self.vm.suspend()
@unittest.mock.patch('asyncio.create_subprocess_exec')
def test_700_run_service(self, mock_subprocess):
self.add_vm('vm')
with self.subTest('running'):
self.loop.run_until_complete(self.vm.run_service('test.service'))
mock_subprocess.assert_called_once_with(
'/usr/lib/qubes/qubes-rpc-multiplexer',
'test.service', 'dom0', 'name', 'dom0')
mock_subprocess.reset_mock()
with self.subTest('other_user'):
self.loop.run_until_complete(
self.vm.run_service('test.service', user='other'))
mock_subprocess.assert_called_once_with(
'runuser', '-u', 'other', '--',
'/usr/lib/qubes/qubes-rpc-multiplexer',
'test.service', 'dom0', 'name', 'dom0')
mock_subprocess.reset_mock()
with self.subTest('other_source'):
self.loop.run_until_complete(
self.vm.run_service('test.service', source='test-inst-vm'))
mock_subprocess.assert_called_once_with(
'/usr/lib/qubes/qubes-rpc-multiplexer',
'test.service', 'test-inst-vm', 'name', 'dom0')
@unittest.mock.patch('qubes.vm.adminvm.AdminVM.run_service')
def test_710_run_service_for_stdio(self, mock_run_service):
communicate_mock = mock_run_service.return_value.communicate
communicate_mock.return_value = (b'stdout', b'stderr')
mock_run_service.return_value.returncode = 0
with self.subTest('default'):
value = self.loop.run_until_complete(
self.vm.run_service_for_stdio('test.service'))
mock_run_service.assert_called_once_with(
'test.service',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
communicate_mock.assert_called_once_with(input=None)
self.assertEqual(value, (b'stdout', b'stderr'))
mock_run_service.reset_mock()
communicate_mock.reset_mock()
with self.subTest('with_input'):
value = self.loop.run_until_complete(
self.vm.run_service_for_stdio('test.service', input=b'abc'))
mock_run_service.assert_called_once_with(
'test.service',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
communicate_mock.assert_called_once_with(input=b'abc')
self.assertEqual(value, (b'stdout', b'stderr'))
mock_run_service.reset_mock()
communicate_mock.reset_mock()
with self.subTest('error'):
mock_run_service.return_value.returncode = 1
with self.assertRaises(subprocess.CalledProcessError) as exc:
self.loop.run_until_complete(
self.vm.run_service_for_stdio('test.service'))
mock_run_service.assert_called_once_with(
'test.service',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
communicate_mock.assert_called_once_with(input=None)
self.assertEqual(exc.exception.returncode, 1)
self.assertEqual(exc.exception.output, b'stdout')
            self.assertEqual(exc.exception.stderr, b'stderr')


# Editor's addition: allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| QubesOS/qubes-core-admin | qubes/tests/vm/adminvm.py | Python | lgpl-2.1 | 6,926 |
from PyQt4 import QtCore,QtGui,uic
from servicemodel import ServiceModel
from servicedialog import ServiceDialog
from blur.Classes import Service, ServiceList
class MainWindow(QtGui.QMainWindow):
def __init__(self):
QtGui.QMainWindow.__init__(self)
uic.loadUi("ui/mainwindow.ui",self)
self.setWindowTitle( 'AssTamer v' )
self.ServiceModel = ServiceModel(self)
print (self.ServiceModel.index(0,0).parent() == QtCore.QModelIndex())
self.mServiceTree.setModel(self.ServiceModel)
self.connect( self.mServiceTree, QtCore.SIGNAL('showMenu( const QPoint &, const Record &, RecordList )'), self.showServiceTreeMenu )
self.setupActions()
self.setupToolbar()
self.setupMenu()
self.RefreshTimer = QtCore.QTimer(self)
self.connect( self.RefreshTimer, QtCore.SIGNAL('timeout()'), self.refresh )
self.refresh()
#self.RefreshTimer.start( 30 * 1000 )
def refresh(self):
self.ServiceModel.refresh()
self.mServiceTree.viewport().update()
def setAutoRefreshEnabled(self, enabled):
if enabled:
self.RefreshTimer.start( 30 * 1000 )
else:
self.RefreshTimer.stop()
def setupActions(self):
self.RefreshAction = QtGui.QAction('Refresh',self)
self.connect( self.RefreshAction, QtCore.SIGNAL('triggered()'), self.refresh )
self.QuitAction = QtGui.QAction('Quit',self)
self.connect( self.QuitAction, QtCore.SIGNAL('triggered()'), QtGui.QApplication.instance(), QtCore.SLOT('quit()'))
self.AutoRefreshAction = QtGui.QAction('Auto Refresh',self)
self.AutoRefreshAction.setCheckable( True )
self.connect( self.AutoRefreshAction, QtCore.SIGNAL('toggled(bool)'), self.setAutoRefreshEnabled )
def setupToolbar(self):
self.Toolbar = self.addToolBar('Main Toolbar')
self.Toolbar.addAction( self.RefreshAction )
def setupMenu(self):
self.FileMenu = self.menuBar().addMenu( "File" )
self.FileMenu.addAction( self.QuitAction )
self.ViewMenu = self.menuBar().addMenu( "View" )
self.ViewMenu.addAction( self.AutoRefreshAction )
def updateActions(self):
pass
def showServiceTreeMenu(self, pos, recUnder, recsSel):
menu = QtGui.QMenu(self)
editServicesAction = None
# All services
if recsSel.size() == ServiceList(recsSel).size():
editServicesAction = menu.addAction( "Edit Services" )
result = menu.exec_(pos)
if result and result==editServicesAction:
sd = ServiceDialog(self)
sd.setServices(recsSel)
sd.exec_()
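
# Editor's usage sketch (assumes the ui/ files and the blur libraries are
# importable):
if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    win = MainWindow()
    win.show()
    sys.exit(app.exec_())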
| lordtangent/arsenalsuite | python/asstamer/mainwindow.py | Python | gpl-2.0 | 2,409 |
import json
from django.urls import reverse
from seaserv import seafile_api
from seahub.test_utils import BaseTestCase
from seahub.base.templatetags.seahub_tags import email2nickname, \
email2contact_email
class RepoViewTest(BaseTestCase):
def setUp(self):
self.user_name = self.user.username
self.admin_name = self.admin.username
self.url = reverse('api-v2.1-repo-view', args=[self.repo.id])
def tearDown(self):
self.remove_repo()
def test_can_get(self):
self.login_as(self.user)
resp = self.client.get(self.url)
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp['repo_id'] == self.repo.id
assert json_resp['repo_name'] == self.repo.name
assert json_resp['owner_email'] == self.user_name
assert json_resp['owner_name'] == email2nickname(self.user_name)
assert json_resp['owner_contact_email'] == email2contact_email(self.user_name)
assert json_resp['permission'] == 'rw'
assert json_resp['status'] == 'normal'
self.assertFalse(json_resp['encrypted'])
self.assertIsNotNone(json_resp['file_count'])
self.assertIsNotNone(json_resp['size'])
def test_can_get_be_shared_repo_info(self):
# create admin repo
admin_repo_id = seafile_api.create_repo(name='test-repo', desc='',
username=self.admin_name, passwd=None)
admin_repo = seafile_api.get_repo(admin_repo_id)
# share admin repo to current user
permission = 'r'
seafile_api.share_repo(admin_repo_id, self.admin_name,
self.user_name, permission)
self.login_as(self.user)
url = reverse('api-v2.1-repo-view', args=[admin_repo_id])
resp = self.client.get(url)
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp['repo_id'] == admin_repo.id
assert json_resp['repo_name'] == admin_repo.name
assert json_resp['owner_email'] == self.admin_name
assert json_resp['owner_name'] == email2nickname(self.admin_name)
assert json_resp['owner_contact_email'] == email2contact_email(self.admin_name)
assert json_resp['permission'] == permission
assert json_resp['status'] == 'normal'
self.assertFalse(json_resp['encrypted'])
self.assertIsNotNone(json_resp['file_count'])
self.assertIsNotNone(json_resp['size'])
self.remove_repo(admin_repo_id)
def test_get_with_invalid_authentication(self):
self.login_as(self.admin)
resp = self.client.get(self.url)
self.assertEqual(403, resp.status_code)
def test_get_with_invalid_permission(self):
admin_repo_id = seafile_api.create_repo(name='test-repo', desc='',
username=self.admin_name, passwd=None)
self.login_as(self.user)
url = reverse('api-v2.1-repo-view', args=[admin_repo_id])
resp = self.client.get(url)
self.assertEqual(403, resp.status_code)
self.remove_repo(admin_repo_id)
def test_get_with_invalid_repo(self):
self.login_as(self.user)
repo_id = self.repo.id
invalid_repo_id = repo_id[0:-5] + '12345'
url = reverse('api-v2.1-repo-view', args=[invalid_repo_id])
resp = self.client.get(url)
self.assertEqual(404, resp.status_code)
def test_delete_with_normal_status(self):
self.login_as(self.user)
resp = self.client.delete(self.url)
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp == 'success'
def test_delete_with_read_only_status(self):
self.login_as(self.user)
seafile_api.set_repo_status(self.repo.id, 1)
resp = self.client.delete(self.url)
self.assertEqual(403, resp.status_code)
| miurahr/seahub | tests/api/endpoints/test_repos.py | Python | apache-2.0 | 3,930 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('polls', '0004_auto_20150504_1427'),
]
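    # Editor's note: each AddField below adds one per-locale column
    # (cz/de/en/es/fr/it/pl/pt) for a translatable field -- the pattern used
    # by tools such as django-modeltranslation.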
operations = [
migrations.AddField(
model_name='simplepoll',
name='name_cz',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_de',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_en',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_es',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_fr',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_it',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_pl',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepoll',
name='name_pt',
field=models.CharField(max_length=200, null=True, verbose_name='name'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_cz',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_de',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_en',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_es',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_fr',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_it',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_pl',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollanswer',
name='text_pt',
field=models.CharField(default='', max_length=255, null=True, verbose_name='answer'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_cz',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_de',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_en',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_es',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_fr',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_it',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_pl',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
migrations.AddField(
model_name='simplepollquestion',
name='text_pt',
field=models.TextField(default='', null=True, verbose_name='question'),
preserve_default=True,
),
]
| CivilHub/CivilHub | polls/migrations/0005_auto_20150504_1618.py | Python | gpl-3.0 | 5,847 |
"""Test the cross_validation module"""
import warnings
import numpy as np
from scipy.sparse import coo_matrix
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.fixes import unique
from sklearn import cross_validation as cval
from sklearn.base import BaseEstimator
from sklearn.datasets import make_regression
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from sklearn.metrics import f1_score
from sklearn.metrics import explained_variance_score
from sklearn.metrics import fbeta_score
from sklearn.metrics import make_scorer
from sklearn.externals import six
from sklearn.linear_model import Ridge
from sklearn.svm import SVC
class MockListClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation.
    Checks that the cross-validation machinery didn't convert X to an array.
"""
def __init__(self, foo_param=0):
self.foo_param = foo_param
def fit(self, X, Y):
assert_true(len(X) == len(Y))
assert_true(isinstance(X, list))
return self
def predict(self, T):
return T.shape[0]
def score(self, X=None, Y=None):
if self.foo_param > 1:
score = 1.
else:
score = 0.
return score
class MockClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation"""
def __init__(self, a=0):
self.a = a
def fit(self, X, Y=None, sample_weight=None, class_prior=None):
if sample_weight is not None:
assert_true(sample_weight.shape[0] == X.shape[0],
'MockClassifier extra fit_param sample_weight.shape[0]'
' is {0}, should be {1}'.format(sample_weight.shape[0],
X.shape[0]))
        if class_prior is not None:
            # NOTE: the assertion references the module-level ``y`` fixture,
            # not the ``Y`` argument of this method.
            assert_true(class_prior.shape[0] == len(np.unique(y)),
                        'MockClassifier extra fit_param class_prior.shape[0]'
                        ' is {0}, should be {1}'.format(class_prior.shape[0],
                                                        len(np.unique(y))))
return self
def predict(self, T):
return T.shape[0]
def score(self, X=None, Y=None):
return 1. / (1 + np.abs(self.a))
X = np.ones((10, 2))
X_sparse = coo_matrix(X)
y = np.arange(10) // 2
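# Editor's note: the fixtures above give 10 constant 2-feature samples and
# five balanced classes (y == [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]).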
##############################################################################
# Tests
def test_kfold_valueerrors():
# Check that errors are raised if there is not enough samples
assert_raises(ValueError, cval.KFold, 3, 4)
# Check that a warning is raised if the least populated class has too few
# members.
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
y = [0, 0, 1, 1, 2]
cval.StratifiedKFold(y, 3)
# checking there was only one warning.
assert_equal(len(w), 1)
# checking it has the right type
assert_equal(w[0].category, Warning)
# checking it's the right warning. This might be a bad test since it's
# a characteristic of the code and not a behavior
assert_true("The least populated class" in str(w[0]))
# Error when number of folds is <= 1
assert_raises(ValueError, cval.KFold, 2, 0)
assert_raises(ValueError, cval.KFold, 2, 1)
assert_raises(ValueError, cval.StratifiedKFold, y, 0)
assert_raises(ValueError, cval.StratifiedKFold, y, 1)
# When n is not integer:
assert_raises(ValueError, cval.KFold, 2.5, 2)
# When n_folds is not integer:
assert_raises(ValueError, cval.KFold, 5, 1.5)
assert_raises(ValueError, cval.StratifiedKFold, y, 1.5)
def test_kfold_indices():
# Check all indices are returned in the test folds
kf = cval.KFold(300, 3)
all_folds = None
for train, test in kf:
if all_folds is None:
all_folds = test.copy()
else:
all_folds = np.concatenate((all_folds, test))
all_folds.sort()
assert_array_equal(all_folds, np.arange(300))
def test_kfold_balance():
# Check that KFold returns folds with balanced sizes
for kf in [cval.KFold(i, 5) for i in range(11, 17)]:
sizes = []
for _, test in kf:
sizes.append(len(test))
assert_true((np.max(sizes) - np.min(sizes)) <= 1)
assert_equal(np.sum(sizes), kf.n)
@ignore_warnings
def test_shuffle_kfold():
# Check the indices are shuffled properly, and that all indices are
# returned in the different test folds
kf1 = cval.KFold(300, 3, shuffle=True, random_state=0, indices=True)
kf2 = cval.KFold(300, 3, shuffle=True, random_state=0, indices=False)
ind = np.arange(300)
for kf in (kf1, kf2):
all_folds = None
for train, test in kf:
sorted_array = np.arange(100)
assert_true(np.any(sorted_array != ind[train]))
sorted_array = np.arange(101, 200)
assert_true(np.any(sorted_array != ind[train]))
sorted_array = np.arange(201, 300)
assert_true(np.any(sorted_array != ind[train]))
if all_folds is None:
all_folds = ind[test].copy()
else:
all_folds = np.concatenate((all_folds, ind[test]))
all_folds.sort()
assert_array_equal(all_folds, ind)
def test_shuffle_split():
ss1 = cval.ShuffleSplit(10, test_size=0.2, random_state=0)
ss2 = cval.ShuffleSplit(10, test_size=2, random_state=0)
ss3 = cval.ShuffleSplit(10, test_size=np.int32(2), random_state=0)
for typ in six.integer_types:
ss4 = cval.ShuffleSplit(10, test_size=typ(2), random_state=0)
for t1, t2, t3, t4 in zip(ss1, ss2, ss3, ss4):
assert_array_equal(t1[0], t2[0])
assert_array_equal(t2[0], t3[0])
assert_array_equal(t3[0], t4[0])
assert_array_equal(t1[1], t2[1])
assert_array_equal(t2[1], t3[1])
assert_array_equal(t3[1], t4[1])
def test_stratified_shuffle_split_init():
y = np.asarray([0, 1, 1, 1, 2, 2, 2])
# Check that error is raised if there is a class with only one sample
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.2)
# Check that error is raised if the test set size is smaller than n_classes
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 2)
# Check that error is raised if the train set size is smaller than
# n_classes
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 3, 2)
y = np.asarray([0, 0, 0, 1, 1, 1, 2, 2, 2])
# Check that errors are raised if there is not enough samples
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.5, 0.6)
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 8, 0.6)
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.6, 8)
# Train size or test size too small
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, train_size=2)
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, test_size=2)
def test_stratified_shuffle_split_iter():
ys = [np.array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]),
np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]),
np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]),
np.array([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]),
np.array([-1] * 800 + [1] * 50)
]
for y in ys:
sss = cval.StratifiedShuffleSplit(y, 6, test_size=0.33,
random_state=0)
for train, test in sss:
assert_array_equal(unique(y[train]), unique(y[test]))
# Checks if folds keep classes proportions
p_train = (np.bincount(unique(y[train], return_inverse=True)[1]) /
float(len(y[train])))
p_test = (np.bincount(unique(y[test], return_inverse=True)[1]) /
float(len(y[test])))
assert_array_almost_equal(p_train, p_test, 1)
assert_equal(y[train].size + y[test].size, y.size)
assert_array_equal(np.lib.arraysetops.intersect1d(train, test), [])
@ignore_warnings
def test_stratified_shuffle_split_iter_no_indices():
y = np.asarray([0, 1, 2] * 10)
sss1 = cval.StratifiedShuffleSplit(y, indices=False, random_state=0)
train_mask, test_mask = next(iter(sss1))
sss2 = cval.StratifiedShuffleSplit(y, indices=True, random_state=0)
train_indices, test_indices = next(iter(sss2))
assert_array_equal(sorted(test_indices), np.where(test_mask)[0])
def test_leave_label_out_changing_labels():
"""Check that LeaveOneLabelOut and LeavePLabelOut work normally if
the labels variable is changed before calling __iter__"""
labels = np.array([0, 1, 2, 1, 1, 2, 0, 0])
labels_changing = np.array(labels, copy=True)
lolo = cval.LeaveOneLabelOut(labels)
lolo_changing = cval.LeaveOneLabelOut(labels_changing)
lplo = cval.LeavePLabelOut(labels, p=2)
lplo_changing = cval.LeavePLabelOut(labels_changing, p=2)
labels_changing[:] = 0
for llo, llo_changing in [(lolo, lolo_changing), (lplo, lplo_changing)]:
for (train, test), (train_chan, test_chan) in zip(llo, llo_changing):
assert_array_equal(train, train_chan)
assert_array_equal(test, test_chan)
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
scores = cval.cross_val_score(clf, X, y)
assert_array_equal(scores, clf.score(X, y))
# test with multioutput y
scores = cval.cross_val_score(clf, X_sparse, X)
assert_array_equal(scores, clf.score(X_sparse, X))
scores = cval.cross_val_score(clf, X_sparse, y)
assert_array_equal(scores, clf.score(X_sparse, y))
# test with multioutput y
scores = cval.cross_val_score(clf, X_sparse, X)
assert_array_equal(scores, clf.score(X_sparse, X))
# test with X as list
clf = MockListClassifier()
scores = cval.cross_val_score(clf, X.tolist(), y)
assert_raises(ValueError, cval.cross_val_score, clf, X, y,
scoring="sklearn")
def test_cross_val_score_precomputed():
# test for svm with precomputed kernel
svm = SVC(kernel="precomputed")
iris = load_iris()
X, y = iris.data, iris.target
linear_kernel = np.dot(X, X.T)
score_precomputed = cval.cross_val_score(svm, linear_kernel, y)
svm = SVC(kernel="linear")
score_linear = cval.cross_val_score(svm, X, y)
assert_array_equal(score_precomputed, score_linear)
# Error raised for non-square X
svm = SVC(kernel="precomputed")
assert_raises(ValueError, cval.cross_val_score, svm, X, y)
# test error is raised when the precomputed kernel is not array-like
# or sparse
assert_raises(ValueError, cval.cross_val_score, svm,
linear_kernel.tolist(), y)
def test_cross_val_score_fit_params():
clf = MockClassifier()
n_samples = X.shape[0]
n_classes = len(np.unique(y))
fit_params = {'sample_weight': np.ones(n_samples),
'class_prior': np.ones(n_classes) / n_classes}
cval.cross_val_score(clf, X, y, fit_params=fit_params)
def test_cross_val_score_score_func():
clf = MockClassifier()
_score_func_args = []
def score_func(y_test, y_predict):
_score_func_args.append((y_test, y_predict))
return 1.0
with warnings.catch_warnings(record=True):
score = cval.cross_val_score(clf, X, y, score_func=score_func)
assert_array_equal(score, [1.0, 1.0, 1.0])
assert len(_score_func_args) == 3
def test_cross_val_score_errors():
class BrokenEstimator:
pass
assert_raises(TypeError, cval.cross_val_score, BrokenEstimator(), X)
def test_train_test_split_errors():
assert_raises(ValueError, cval.train_test_split)
assert_raises(ValueError, cval.train_test_split, range(3), train_size=1.1)
assert_raises(ValueError, cval.train_test_split, range(3), test_size=0.6,
train_size=0.6)
assert_raises(ValueError, cval.train_test_split, range(3),
test_size=np.float32(0.6), train_size=np.float32(0.6))
assert_raises(ValueError, cval.train_test_split, range(3),
test_size="wrong_type")
assert_raises(ValueError, cval.train_test_split, range(3), test_size=2,
train_size=4)
assert_raises(TypeError, cval.train_test_split, range(3),
some_argument=1.1)
assert_raises(ValueError, cval.train_test_split, range(3), range(42))
def test_train_test_split():
X = np.arange(100).reshape((10, 10))
X_s = coo_matrix(X)
y = range(10)
split = cval.train_test_split(X, X_s, y)
X_train, X_test, X_s_train, X_s_test, y_train, y_test = split
assert_array_equal(X_train, X_s_train.toarray())
assert_array_equal(X_test, X_s_test.toarray())
assert_array_equal(X_train[:, 0], y_train * 10)
assert_array_equal(X_test[:, 0], y_test * 10)
split = cval.train_test_split(X, y, test_size=None, train_size=.5)
X_train, X_test, y_train, y_test = split
assert_equal(len(y_test), len(y_train))
def test_cross_val_score_with_score_func_classification():
iris = load_iris()
clf = SVC(kernel='linear')
# Default score (should be the accuracy score)
scores = cval.cross_val_score(clf, iris.data, iris.target, cv=5)
assert_array_almost_equal(scores, [1., 0.97, 0.90, 0.97, 1.], 2)
# Correct classification score (aka. zero / one score) - should be the
# same as the default estimator score
zo_scores = cval.cross_val_score(clf, iris.data, iris.target,
scoring="accuracy", cv=5)
assert_array_almost_equal(zo_scores, [1., 0.97, 0.90, 0.97, 1.], 2)
# F1 score (class are balanced so f1_score should be equal to zero/one
# score
f1_scores = cval.cross_val_score(clf, iris.data, iris.target,
scoring="f1", cv=5)
assert_array_almost_equal(f1_scores, [1., 0.97, 0.90, 0.97, 1.], 2)
# also test deprecated old way
with warnings.catch_warnings(record=True):
f1_scores = cval.cross_val_score(clf, iris.data, iris.target,
score_func=f1_score, cv=5)
assert_array_almost_equal(f1_scores, [1., 0.97, 0.90, 0.97, 1.], 2)
def test_cross_val_score_with_score_func_regression():
X, y = make_regression(n_samples=30, n_features=20, n_informative=5,
random_state=0)
reg = Ridge()
# Default score of the Ridge regression estimator
scores = cval.cross_val_score(reg, X, y, cv=5)
assert_array_almost_equal(scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
# R2 score (aka. determination coefficient) - should be the
# same as the default estimator score
r2_scores = cval.cross_val_score(reg, X, y, scoring="r2", cv=5)
assert_array_almost_equal(r2_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
# Mean squared error; this is a loss function, so "scores" are negative
mse_scores = cval.cross_val_score(reg, X, y, cv=5,
scoring="mean_squared_error")
expected_mse = np.array([-763.07, -553.16, -274.38, -273.26, -1681.99])
assert_array_almost_equal(mse_scores, expected_mse, 2)
# Explained variance
with warnings.catch_warnings(record=True):
ev_scores = cval.cross_val_score(reg, X, y, cv=5,
score_func=explained_variance_score)
assert_array_almost_equal(ev_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
def test_permutation_score():
iris = load_iris()
X = iris.data
X_sparse = coo_matrix(X)
y = iris.target
svm = SVC(kernel='linear')
cv = cval.StratifiedKFold(y, 2)
score, scores, pvalue = cval.permutation_test_score(
svm, X, y, cv=cv, scoring="accuracy")
assert_greater(score, 0.9)
assert_almost_equal(pvalue, 0.0, 1)
score_label, _, pvalue_label = cval.permutation_test_score(
svm, X, y, cv=cv, scoring="accuracy", labels=np.ones(y.size),
random_state=0)
assert_true(score_label == score)
assert_true(pvalue_label == pvalue)
# test with custom scoring object
scorer = make_scorer(fbeta_score, beta=2)
score_label, _, pvalue_label = cval.permutation_test_score(
svm, X, y, scoring=scorer, cv=cv, labels=np.ones(y.size),
random_state=0)
assert_almost_equal(score_label, .95, 2)
assert_almost_equal(pvalue_label, 0.01, 3)
# check that we obtain the same results with a sparse representation
svm_sparse = SVC(kernel='linear')
cv_sparse = cval.StratifiedKFold(y, 2)
score_label, _, pvalue_label = cval.permutation_test_score(
svm_sparse, X_sparse, y, cv=cv_sparse,
scoring="accuracy", labels=np.ones(y.size), random_state=0)
assert_true(score_label == score)
assert_true(pvalue_label == pvalue)
# set random y
y = np.mod(np.arange(len(y)), 3)
score, scores, pvalue = cval.permutation_test_score(svm, X, y, cv=cv,
scoring="accuracy")
assert_less(score, 0.5)
assert_greater(pvalue, 0.4)
# test with deprecated interface
with warnings.catch_warnings(record=True):
score, scores, pvalue = cval.permutation_test_score(
svm, X, y, score_func=accuracy_score, cv=cv)
assert_less(score, 0.5)
assert_greater(pvalue, 0.4)
def test_cross_val_generator_with_mask():
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
loo = assert_warns(DeprecationWarning, cval.LeaveOneOut,
4, indices=False)
lpo = assert_warns(DeprecationWarning, cval.LeavePOut,
4, 2, indices=False)
kf = assert_warns(DeprecationWarning, cval.KFold,
4, 2, indices=False)
skf = assert_warns(DeprecationWarning, cval.StratifiedKFold,
y, 2, indices=False)
lolo = assert_warns(DeprecationWarning, cval.LeaveOneLabelOut,
labels, indices=False)
lopo = assert_warns(DeprecationWarning, cval.LeavePLabelOut,
labels, 2, indices=False)
ss = assert_warns(DeprecationWarning, cval.ShuffleSplit,
4, indices=False)
for cv in [loo, lpo, kf, skf, lolo, lopo, ss]:
for train, test in cv:
assert_equal(np.asarray(train).dtype.kind, 'b')
assert_equal(np.asarray(test).dtype.kind, 'b')
X_train, X_test = X[train], X[test]
y_train, y_test = y[train], y[test]
def test_cross_val_generator_with_indices():
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
# explicitly passing indices value is deprecated
loo = assert_warns(DeprecationWarning, cval.LeaveOneOut,
4, indices=True)
lpo = assert_warns(DeprecationWarning, cval.LeavePOut,
4, 2, indices=True)
kf = assert_warns(DeprecationWarning, cval.KFold,
4, 2, indices=True)
skf = assert_warns(DeprecationWarning, cval.StratifiedKFold,
y, 2, indices=True)
lolo = assert_warns(DeprecationWarning, cval.LeaveOneLabelOut,
labels, indices=True)
lopo = assert_warns(DeprecationWarning, cval.LeavePLabelOut,
labels, 2, indices=True)
b = cval.Bootstrap(2) # only in index mode
ss = assert_warns(DeprecationWarning, cval.ShuffleSplit,
2, indices=True)
for cv in [loo, lpo, kf, skf, lolo, lopo, b, ss]:
for train, test in cv:
assert_not_equal(np.asarray(train).dtype.kind, 'b')
assert_not_equal(np.asarray(test).dtype.kind, 'b')
X_train, X_test = X[train], X[test]
y_train, y_test = y[train], y[test]
def test_cross_val_generator_with_default_indices():
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
loo = cval.LeaveOneOut(4)
lpo = cval.LeavePOut(4, 2)
kf = cval.KFold(4, 2)
skf = cval.StratifiedKFold(y, 2)
lolo = cval.LeaveOneLabelOut(labels)
lopo = cval.LeavePLabelOut(labels, 2)
b = cval.Bootstrap(2) # only in index mode
ss = cval.ShuffleSplit(2)
for cv in [loo, lpo, kf, skf, lolo, lopo, b, ss]:
for train, test in cv:
assert_not_equal(np.asarray(train).dtype.kind, 'b')
assert_not_equal(np.asarray(test).dtype.kind, 'b')
X_train, X_test = X[train], X[test]
y_train, y_test = y[train], y[test]
@ignore_warnings
def test_cross_val_generator_mask_indices_same():
# Test that the cross validation generators return the same results when
# indices=True and when indices=False
y = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2])
labels = np.array([1, 1, 2, 3, 3, 3, 4])
loo_mask = cval.LeaveOneOut(5, indices=False)
loo_ind = cval.LeaveOneOut(5, indices=True)
lpo_mask = cval.LeavePOut(10, 2, indices=False)
lpo_ind = cval.LeavePOut(10, 2, indices=True)
kf_mask = cval.KFold(10, 5, indices=False, shuffle=True, random_state=1)
kf_ind = cval.KFold(10, 5, indices=True, shuffle=True, random_state=1)
skf_mask = cval.StratifiedKFold(y, 3, indices=False)
skf_ind = cval.StratifiedKFold(y, 3, indices=True)
lolo_mask = cval.LeaveOneLabelOut(labels, indices=False)
lolo_ind = cval.LeaveOneLabelOut(labels, indices=True)
lopo_mask = cval.LeavePLabelOut(labels, 2, indices=False)
lopo_ind = cval.LeavePLabelOut(labels, 2, indices=True)
for cv_mask, cv_ind in [(loo_mask, loo_ind), (lpo_mask, lpo_ind),
(kf_mask, kf_ind), (skf_mask, skf_ind),
(lolo_mask, lolo_ind), (lopo_mask, lopo_ind)]:
for (train_mask, test_mask), (train_ind, test_ind) in \
zip(cv_mask, cv_ind):
assert_array_equal(np.where(train_mask)[0], train_ind)
assert_array_equal(np.where(test_mask)[0], test_ind)
def test_bootstrap_errors():
assert_raises(ValueError, cval.Bootstrap, 10, train_size=100)
assert_raises(ValueError, cval.Bootstrap, 10, test_size=100)
assert_raises(ValueError, cval.Bootstrap, 10, train_size=1.1)
assert_raises(ValueError, cval.Bootstrap, 10, test_size=1.1)
def test_bootstrap_test_sizes():
assert_equal(cval.Bootstrap(10, test_size=0.2).test_size, 2)
assert_equal(cval.Bootstrap(10, test_size=2).test_size, 2)
assert_equal(cval.Bootstrap(10, test_size=None).test_size, 5)
def test_shufflesplit_errors():
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=2.0)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=1.0)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=0.1,
train_size=0.95)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=11)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=10)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=8, train_size=3)
assert_raises(ValueError, cval.ShuffleSplit, 10, train_size=1j)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=None,
train_size=None)
def test_shufflesplit_reproducible():
# Check that iterating twice on the ShuffleSplit gives the same
# sequence of train-test when the random_state is given
ss = cval.ShuffleSplit(10, random_state=21)
assert_array_equal(list(a for a, b in ss), list(a for a, b in ss))
@ignore_warnings
def test_cross_indices_exception():
X = coo_matrix(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
loo = cval.LeaveOneOut(4, indices=False)
lpo = cval.LeavePOut(4, 2, indices=False)
kf = cval.KFold(4, 2, indices=False)
skf = cval.StratifiedKFold(y, 2, indices=False)
lolo = cval.LeaveOneLabelOut(labels, indices=False)
lopo = cval.LeavePLabelOut(labels, 2, indices=False)
assert_raises(ValueError, cval.check_cv, loo, X, y)
assert_raises(ValueError, cval.check_cv, lpo, X, y)
assert_raises(ValueError, cval.check_cv, kf, X, y)
assert_raises(ValueError, cval.check_cv, skf, X, y)
assert_raises(ValueError, cval.check_cv, lolo, X, y)
assert_raises(ValueError, cval.check_cv, lopo, X, y)
| B3AU/waveTree | sklearn/tests/test_cross_validation.py | Python | bsd-3-clause | 24,996 |
import numpy as np
np.random.seed(2016)
import os
import glob
import cv2
import datetime
import time
from sklearn.cross_validation import KFold
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.optimizers import Adam
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping
from keras.utils import np_utils
from sklearn.metrics import log_loss
def get_im_cv2(path, img_rows, img_cols):
img = cv2.imread(path, 0)
resized = cv2.resize(img, (img_cols, img_rows), interpolation=cv2.INTER_LINEAR)
return resized
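# Usage sketch (illustrative, not part of the original script): with
# img_rows=64 and img_cols=80 the call below yields a 64x80 grayscale array,
# since cv2.resize takes dsize as (width, height).
#     img = get_im_cv2('../input/train/1_1.tif', 64, 80)
#     assert img.shape == (64, 80)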
def load_train(img_rows, img_cols):
X_train = []
X_train_id = []
mask_train = []
start_time = time.time()
print('Read train images')
files = glob.glob("../input/train/*[0-9].tif")
for fl in files:
flbase = os.path.basename(fl)
img = get_im_cv2(fl, img_rows, img_cols)
X_train.append(img)
X_train_id.append(flbase[:-4])
mask_path = "../input/train/" + flbase[:-4] + "_mask.tif"
mask = get_im_cv2(mask_path, img_rows, img_cols)
mask_train.append(mask)
print('Read train data time: {} seconds'.format(round(time.time() - start_time, 2)))
return X_train, mask_train, X_train_id
| khushhallchandra/Deep-Learning | kaggle/ultrasoundNerveReco/src/kerasTest.py | Python | mit | 1,325 |
# -*- coding: utf-8 -*-
"""#Versión de la calculadora."""
version = '1.0.0'
| Ryszard-Ps/rsr-calculator | rsr_calculator/version.py | Python | gpl-3.0 | 77 |
""" Openssl Elliptic Curve Parameters
Run ``openssl ecparam -list_curves`` to show all of the curve identifiers supported in OpenSSL.
Import the ``charm.toolbox.eccurve`` module for the full listing from Charm.
"""
prime192v1 = 409
prime192v2 = 410
prime192v3 = 411
prime239v1 = 412
prime239v2 = 413
prime239v3 = 414
prime256v1 = 415
c2pnb163v1 = 684
c2pnb163v2 = 685
c2pnb163v3 = 686
c2pnb176v1 = 687
c2tnb191v1 = 688
c2tnb191v2 = 689
c2tnb191v3 = 690
c2onb191v4 = 691
c2onb191v5 = 692
c2pnb208w1 = 693
c2tnb239v1 = 694
c2tnb239v2 = 695
c2tnb239v3 = 696
c2onb239v4 = 697
c2onb239v5 = 698
c2pnb272w1 = 699
c2pnb304w1 = 700
c2tnb359v1 = 701
c2pnb368w1 = 702
c2tnb431r1 = 703
secp112r1 = 704
secp112r2 = 705
secp128r1 = 706
secp128r2 = 707
secp160k1 = 708
secp160r1 = 709
secp160r2 = 710
secp192k1 = 711
secp224k1 = 712
secp224r1 = 713
secp256k1 = 714
secp384r1 = 715
secp521r1 = 716
sect113r1 = 717
sect113r2 = 718
sect131r1 = 719
sect131r2 = 720
sect163k1 = 721
sect163r1 = 722
sect163r2 = 723
sect193r1 = 724
sect193r2 = 725
sect233k1 = 726
sect233r1 = 727
sect239k1 = 728
sect283k1 = 729
sect283r1 = 730
sect409k1 = 731
sect409r1 = 732
sect571k1 = 733
sect571r1 = 734
ecid_wtls1 = 735
ecid_wtls3 = 736
ecid_wtls4 = 737
ecid_wtls5 = 738
ecid_wtls6 = 739
ecid_wtls7 = 740
ecid_wtls8 = 741
ecid_wtls9 = 742
ecid_wtls10 = 743
ecid_wtls11 = 744
ecid_wtls12 = 745
curve_description = {
secp112r1 : 'SECG/WTLS curve over a 112 bit prime field',
secp112r2 : 'SECG curve over a 112 bit prime field',
secp128r1 : 'SECG curve over a 128 bit prime field',
secp128r2 : 'SECG curve over a 128 bit prime field',
secp160k1 : 'SECG curve over a 160 bit prime field',
secp160r1 : 'SECG curve over a 160 bit prime field',
secp160r2 : 'SECG/WTLS curve over a 160 bit prime field',
secp192k1 : 'SECG curve over a 192 bit prime field',
secp224k1 : 'SECG curve over a 224 bit prime field',
secp224r1 : 'NIST/SECG curve over a 224 bit prime field',
secp256k1 : 'SECG curve over a 256 bit prime field',
secp384r1 : 'NIST/SECG curve over a 384 bit prime field',
secp521r1 : 'NIST/SECG curve over a 521 bit prime field',
prime192v1: 'NIST/X9.62/SECG curve over a 192 bit prime field',
prime192v2: 'X9.62 curve over a 192 bit prime field',
prime192v3: 'X9.62 curve over a 192 bit prime field',
prime239v1: 'X9.62 curve over a 239 bit prime field',
prime239v2: 'X9.62 curve over a 239 bit prime field',
prime239v3: 'X9.62 curve over a 239 bit prime field',
prime256v1: 'X9.62/SECG curve over a 256 bit prime field',
sect113r1 : 'SECG curve over a 113 bit binary field',
sect113r2 : 'SECG curve over a 113 bit binary field',
sect131r1 : 'SECG/WTLS curve over a 131 bit binary field',
sect131r2 : 'SECG curve over a 131 bit binary field',
sect163k1 : 'NIST/SECG/WTLS curve over a 163 bit binary field',
sect163r1 : 'SECG curve over a 163 bit binary field',
sect163r2 : 'NIST/SECG curve over a 163 bit binary field',
sect193r1 : 'SECG curve over a 193 bit binary field',
sect193r2 : 'SECG curve over a 193 bit binary field',
sect233k1 : 'NIST/SECG/WTLS curve over a 233 bit binary field',
sect233r1 : 'NIST/SECG/WTLS curve over a 233 bit binary field',
sect239k1 : 'SECG curve over a 239 bit binary field',
sect283k1 : 'NIST/SECG curve over a 283 bit binary field',
sect283r1 : 'NIST/SECG curve over a 283 bit binary field',
sect409k1 : 'NIST/SECG curve over a 409 bit binary field',
sect409r1 : 'NIST/SECG curve over a 409 bit binary field',
sect571k1 : 'NIST/SECG curve over a 571 bit binary field',
sect571r1 : 'NIST/SECG curve over a 571 bit binary field',
c2pnb163v1: 'X9.62 curve over a 163 bit binary field',
c2pnb163v2: 'X9.62 curve over a 163 bit binary field',
c2pnb163v3: 'X9.62 curve over a 163 bit binary field',
c2pnb176v1: 'X9.62 curve over a 176 bit binary field',
c2tnb191v1: 'X9.62 curve over a 191 bit binary field',
c2tnb191v2: 'X9.62 curve over a 191 bit binary field',
c2tnb191v3: 'X9.62 curve over a 191 bit binary field',
c2pnb208w1: 'X9.62 curve over a 208 bit binary field',
c2tnb239v1: 'X9.62 curve over a 239 bit binary field',
c2tnb239v2: 'X9.62 curve over a 239 bit binary field',
c2tnb239v3: 'X9.62 curve over a 239 bit binary field',
c2pnb272w1: 'X9.62 curve over a 272 bit binary field',
c2pnb304w1: 'X9.62 curve over a 304 bit binary field',
c2tnb359v1: 'X9.62 curve over a 359 bit binary field',
c2pnb368w1: 'X9.62 curve over a 368 bit binary field',
c2tnb431r1: 'X9.62 curve over a 431 bit binary field',
ecid_wtls1: 'WTLS curve over a 113 bit binary field',
ecid_wtls3: 'NIST/SECG/WTLS curve over a 163 bit binary field',
ecid_wtls4: 'SECG curve over a 113 bit binary field',
ecid_wtls5: 'X9.62 curve over a 163 bit binary field',
ecid_wtls6: 'SECG/WTLS curve over a 112 bit prime field',
ecid_wtls7: 'SECG/WTLS curve over a 160 bit prime field',
ecid_wtls8: 'WTLS curve over a 112 bit prime field',
ecid_wtls9: 'WTLS curve over a 160 bit prime field',
ecid_wtls10:'NIST/SECG/WTLS curve over a 233 bit binary field',
ecid_wtls11:'NIST/SECG/WTLS curve over a 233 bit binary field',
ecid_wtls12:'WTLS curve over a 224 bit prime field',
}
| JHUISI/charm | charm/toolbox/eccurve.py | Python | lgpl-3.0 | 5,184 |
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
from collections import OrderedDict
import datetime
import io
import os.path
import re
from . import helpers as metadata_helpers
from .. import logger
import sg_helpers
from ..indexers import indexer_config
from ..indexers.indexer_config import TVINFO_TVDB, TVINFO_TMDB
from lib.tvinfo_base import TVInfoImage, TVInfoImageType, TVInfoImageSize
from lib.tvinfo_base.exceptions import *
import sickbeard
# noinspection PyPep8Naming
import encodingKludge as ek
from exceptions_helper import ex
from lib.fanart.core import Request as fanartRequest
import lib.fanart as fanart
from lxml_etree import etree
from _23 import filter_iter, list_keys
from six import iteritems, itervalues, string_types
# noinspection PyUnreachableCode
if False:
from typing import AnyStr, Dict, Generator, List, Optional, Tuple, Union
from lib.tvinfo_base import TVInfoShow
from ..tv import TVShow
map_image_types = {
'poster': TVInfoImageType.poster,
'banner': TVInfoImageType.banner,
'fanart': TVInfoImageType.fanart,
'poster_thumb': TVInfoImageType.poster,
'banner_thumb': TVInfoImageType.banner,
}
class ShowInfosDict(OrderedDict):
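"""
OrderedDict whose values may be callables: a callable value is invoked
with its key on first access and the result is cached back into the dict,
so expensive per-source TV info lookups only happen on demand.
"""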
def __getitem__(self, k):
v = OrderedDict.__getitem__(self, k)
if callable(v):
v = v(k)
OrderedDict.__setitem__(self, k, v)
return v
class GenericMetadata(object):
"""
Base class for all metadata providers. Default behavior is meant to mostly
follow XBMC 12+ metadata standards. Has support for:
- show metadata file
- episode metadata file
- episode thumbnail
- show fanart
- show poster
- show banner
- season thumbnails (poster)
- season thumbnails (banner)
- season all poster
- season all banner
"""
def __init__(self,
show_metadata=False, # type: bool
episode_metadata=False, # type: bool
use_fanart=False, # type: bool
use_poster=False, # type: bool
use_banner=False, # type: bool
episode_thumbnails=False, # type: bool
season_posters=False, # type: bool
season_banners=False, # type: bool
season_all_poster=False, # type: bool
season_all_banner=False # type: bool
):
self.name = "Generic" # type: AnyStr
self._ep_nfo_extension = "nfo" # type: AnyStr
self._show_metadata_filename = "tvshow.nfo" # type: AnyStr
self.fanart_name = "fanart.jpg" # type: AnyStr
self.poster_name = "poster.jpg" # type: AnyStr
self.banner_name = "banner.jpg" # type: AnyStr
self.season_all_poster_name = "season-all-poster.jpg" # type: AnyStr
self.season_all_banner_name = "season-all-banner.jpg" # type: AnyStr
self.show_metadata = show_metadata
self.episode_metadata = episode_metadata
self.fanart = use_fanart
self.poster = use_poster
self.banner = use_banner
self.episode_thumbnails = episode_thumbnails
self.season_posters = season_posters
self.season_banners = season_banners
self.season_all_poster = season_all_poster
self.season_all_banner = season_all_banner
def get_config(self):
# type: (...) -> AnyStr
config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner,
self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster,
self.season_all_banner]
return '|'.join([str(int(x)) for x in config_list])
def get_id(self):
# type: (...) -> AnyStr
return GenericMetadata.makeID(self.name)
@staticmethod
def makeID(name):
# type: (AnyStr) -> AnyStr
name_id = re.sub("[+]", "plus", name)
name_id = re.sub(r"[^\w\d_]", "_", name_id).lower()
return name_id
def set_config(self, string):
# type: (AnyStr) -> None
config_list = [bool(int(x)) for x in string.split('|')]
self.show_metadata = config_list[0]
self.episode_metadata = config_list[1]
self.fanart = config_list[2]
self.poster = config_list[3]
self.banner = config_list[4]
self.episode_thumbnails = config_list[5]
self.season_posters = config_list[6]
self.season_banners = config_list[7]
self.season_all_poster = config_list[8]
self.season_all_banner = config_list[9]
def _has_show_metadata(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_show_file_path(show_obj))
logger.log(u"Checking if " + self.get_show_file_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def has_episode_metadata(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
result = ek.ek(os.path.isfile, self.get_episode_file_path(ep_obj))
logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_fanart(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_fanart_path(show_obj))
logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_poster_path(show_obj))
logger.log(u"Checking if " + self.get_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def _has_banner(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_banner_path(show_obj))
logger.log(u"Checking if " + self.get_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
return result
def has_episode_thumb(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
location = self.get_episode_thumb_path(ep_obj)
result = None is not location and ek.ek(os.path.isfile, location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_poster(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> bool
location = self.get_season_poster_path(show_obj, season)
result = None is not location and ek.ek(os.path.isfile, location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_banner(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> bool
location = self.get_season_banner_path(show_obj, season)
result = None is not location and ek.ek(os.path.isfile, location)
if location:
logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
return result
def _has_season_all_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result),
logger.DEBUG)
return result
def _has_season_all_banner(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result),
logger.DEBUG)
return result
@staticmethod
def get_show_year(show_obj, show_info, year_only=True):
# type: (sickbeard.tv.TVShow, Dict, bool) -> Optional[AnyStr]
if None is not getattr(show_info, 'firstaired', None):
try:
first_aired = datetime.datetime.strptime(show_info['firstaired'], '%Y-%m-%d')
if first_aired:
if year_only:
return str(first_aired.year)
return str(first_aired.date())
except (BaseException, Exception):
pass
if isinstance(show_obj, sickbeard.tv.TVShow):
if year_only and show_obj.startyear:
return '%s' % show_obj.startyear
if not show_obj.sxe_ep_obj.get(1, {}).get(1, None):
show_obj.get_all_episodes()
try:
first_ep_obj = show_obj.first_aired_regular_episode
except (BaseException, Exception):
first_ep_obj = None
if isinstance(first_ep_obj, sickbeard.tv.TVEpisode) \
and isinstance(first_ep_obj.airdate, datetime.date) and 1900 < first_ep_obj.airdate.year:
return '%s' % (first_ep_obj.airdate.year, first_ep_obj.airdate)[not year_only]
def get_show_file_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self._show_metadata_filename)
def get_episode_file_path(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> AnyStr
return sg_helpers.replace_extension(ep_obj.location, self._ep_nfo_extension)
def get_fanart_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.fanart_name)
def get_poster_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.poster_name)
def get_banner_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.banner_name)
def get_episode_thumb_path(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> Optional[AnyStr]
"""
Returns the path where the episode thumbnail should be stored.
ep_obj: a TVEpisode instance for which to create the thumbnail
"""
if ek.ek(os.path.isfile, ep_obj.location):
tbn_filename = ep_obj.location.rpartition('.')
if '' == tbn_filename[0]:
tbn_filename = ep_obj.location
else:
tbn_filename = tbn_filename[0]
return tbn_filename + '-thumb.jpg'
def get_season_poster_path(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> AnyStr
"""
Returns the full path to the file for a given season poster.
show_obj: a TVShow instance for which to generate the path
season: a season number to be used for the path. Note that season 0
means specials.
"""
# Our specials thumbnail is, well, special
if 0 == season:
season_poster_filename = 'season-specials'
else:
season_poster_filename = 'season' + str(season).zfill(2)
return ek.ek(os.path.join, show_obj.location, season_poster_filename + '-poster.jpg')
def get_season_banner_path(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> AnyStr
"""
Returns the full path to the file for a given season banner.
show_obj: a TVShow instance for which to generate the path
season: a season number to be used for the path. Note that season 0
means specials.
"""
# Our specials thumbnail is, well, special
if 0 == season:
season_banner_filename = 'season-specials'
else:
season_banner_filename = 'season' + str(season).zfill(2)
return ek.ek(os.path.join, show_obj.location, season_banner_filename + '-banner.jpg')
def get_season_all_poster_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.season_all_poster_name)
def get_season_all_banner_path(self, show_obj):
# type: (sickbeard.tv.TVShow) -> AnyStr
return ek.ek(os.path.join, show_obj.location, self.season_all_banner_name)
def _show_data(self, show_obj):
# type: (sickbeard.tv.TVShow) -> Optional[Union[bool, etree.Element]]
"""
This should be overridden by the implementing class. It should
provide the content of the show metadata file.
"""
return None
@staticmethod
def _valid_show(fetched_show_info, show_obj):
# type: (Dict, sickbeard.tv.TVShow) -> bool
"""
Test the integrity of fetched show data
:param fetched_show_info: the object returned from the tvinfo source
:param show_obj: Show that the fetched data relates to
:return: True if fetched_show_obj is valid data otherwise False
"""
if not (isinstance(fetched_show_info, dict) and
isinstance(getattr(fetched_show_info, 'data', None), (list, dict)) and
'seriesname' in getattr(fetched_show_info, 'data', [])) and \
not hasattr(fetched_show_info, 'seriesname'):
logger.log(u'Show %s not found on %s ' %
(show_obj.name, sickbeard.TVInfoAPI(show_obj.tvid).name), logger.WARNING)
return False
return True
def _ep_data(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> Optional[Union[bool, etree.Element]]
"""
This should be overridden by the implementing class. It should
provide the content of the episode metadata file.
"""
return None
def create_show_metadata(self, show_obj, force=False):
# type: (sickbeard.tv.TVShow, bool) -> bool
result = False
if self.show_metadata and show_obj and (not self._has_show_metadata(show_obj) or force):
logger.debug('Metadata provider %s creating show metadata for %s' % (self.name, show_obj.unique_name))
try:
result = self.write_show_file(show_obj)
except BaseTVinfoError as e:
logger.log('Unable to find useful show metadata for %s on %s: %s' % (
self.name, sickbeard.TVInfoAPI(show_obj.tvid).name, ex(e)), logger.WARNING)
return result
def create_episode_metadata(self, ep_obj, force=False):
# type: (sickbeard.tv.TVEpisode, bool) -> bool
result = False
if self.episode_metadata and ep_obj and (not self.has_episode_metadata(ep_obj) or force):
logger.log('Metadata provider %s creating episode metadata for %s' % (self.name, ep_obj.pretty_name()),
logger.DEBUG)
try:
result = self.write_ep_file(ep_obj)
except BaseTVinfoError as e:
logger.log('Unable to find useful episode metadata for %s on %s: %s' % (
self.name, sickbeard.TVInfoAPI(ep_obj.show_obj.tvid).name, ex(e)), logger.WARNING)
return result
def update_show_indexer_metadata(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
logger.debug(u'Metadata provider %s updating show indexer metadata file for %s' % (
self.name, show_obj.unique_name))
nfo_file_path = self.get_show_file_path(show_obj)
with ek.ek(io.open, nfo_file_path, 'r', encoding='utf8') as xmlFileObj:
show_xml = etree.ElementTree(file=xmlFileObj)
tvid = show_xml.find('indexer')
prodid = show_xml.find('id')
root = show_xml.getroot()
show_tvid = str(show_obj.tvid)
if None is not tvid:
tvid.text = '%s' % show_tvid
else:
etree.SubElement(root, 'indexer').text = '%s' % show_tvid
show_prodid = str(show_obj.prodid)
if None is not prodid:
prodid.text = '%s' % show_prodid
else:
etree.SubElement(root, 'id').text = '%s' % show_prodid
# Make it purdy
sg_helpers.indent_xml(root)
sg_helpers.write_file(nfo_file_path, show_xml, xmltree=True, utf8=True)
return True
def create_fanart(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.fanart and show_obj and not self._has_fanart(show_obj):
logger.debug(u'Metadata provider %s creating fanart for %s' % (self.name, show_obj.unique_name))
return self.save_fanart(show_obj)
return False
def create_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.poster and show_obj and not self._has_poster(show_obj):
logger.debug(u'Metadata provider %s creating poster for %s' % (self.name, show_obj.unique_name))
return self.save_poster(show_obj)
return False
def create_banner(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.banner and show_obj and not self._has_banner(show_obj):
logger.debug(u'Metadata provider %s creating banner for %s' % (self.name, show_obj.unique_name))
return self.save_banner(show_obj)
return False
def create_episode_thumb(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
if self.episode_thumbnails and ep_obj and not self.has_episode_thumb(ep_obj):
logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.pretty_name(),
logger.DEBUG)
return self.save_thumbnail(ep_obj)
return False
def create_season_posters(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_posters and show_obj:
result = []
for season, _ in iteritems(show_obj.sxe_ep_obj):
if not self._has_season_poster(show_obj, season):
logger.debug(u'Metadata provider %s creating season posters for %s' % (
self.name, show_obj.unique_name))
result = result + [self.save_season_posters(show_obj, season)]
return all(result)
return False
def create_season_banners(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_banners and show_obj:
result = []
for season, _ in iteritems(show_obj.sxe_ep_obj):
if not self._has_season_banner(show_obj, season):
logger.debug(u'Metadata provider %s creating season banners for %s' % (
self.name, show_obj.unique_name))
result = result + [self.save_season_banners(show_obj, season)]
return all(result)
return False
def create_season_all_poster(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj):
logger.debug(u'Metadata provider %s creating season all posters for %s' % (
self.name, show_obj.unique_name))
return self.save_season_all_poster(show_obj)
return False
def create_season_all_banner(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj):
logger.debug(u'Metadata provider %s creating season all banner for %s' % (
self.name, show_obj.unique_name))
return self.save_season_all_banner(show_obj)
return False
@staticmethod
def _get_episode_thumb_url(ep_obj):
# type: (sickbeard.tv.TVEpisode) -> Optional[AnyStr]
"""
Returns the URL to use for downloading an episode's thumbnail. Uses
theTVDB.com and TVRage.com data.
:param ep_obj: a TVEpisode object for which to grab the thumb URL
:return: URL to thumb
"""
ep_obj_list = [ep_obj] + ep_obj.related_ep_obj
# validate show
from .. import helpers
if not helpers.validate_show(ep_obj.show_obj):
return None
# try all included episodes in case some have thumbs and others don't
for cur_ep_obj in ep_obj_list:
if TVINFO_TVDB == cur_ep_obj.show_obj.tvid:
show_lang = cur_ep_obj.show_obj.lang
try:
tvinfo_config = sickbeard.TVInfoAPI(TVINFO_TVDB).api_params.copy()
tvinfo_config['dvdorder'] = 0 != cur_ep_obj.show_obj.dvdorder
tvinfo_config['no_dummy'] = True
if show_lang and not 'en' == show_lang:
tvinfo_config['language'] = show_lang
t = sickbeard.TVInfoAPI(TVINFO_TVDB).setup(**tvinfo_config)
ep_info = t[cur_ep_obj.show_obj.prodid][cur_ep_obj.season][cur_ep_obj.episode]
except (BaseTVinfoEpisodenotfound, BaseTVinfoSeasonnotfound, TypeError):
ep_info = None
else:
ep_info = helpers.validate_show(cur_ep_obj.show_obj, cur_ep_obj.season, cur_ep_obj.episode)
if not ep_info:
continue
thumb_url = getattr(ep_info, 'filename', None) \
or (isinstance(ep_info, dict) and ep_info.get('filename', None))
if thumb_url not in (None, False, ''):
return thumb_url
return None
def write_show_file(self, show_obj):
# type: (sickbeard.tv.TVShow) -> bool
"""
Generates and writes show_obj's metadata to the filename given by
get_show_file_path().
show_obj: TVShow object for which to create the metadata
Note that this method expects that _show_data will return an ElementTree
object. If your _show_data returns data in another format you'll need to
override this method.
"""
data = self._show_data(show_obj)
if not data:
return False
nfo_file_path = self.get_show_file_path(show_obj)
logger.log(u'Writing show metadata file: %s' % nfo_file_path, logger.DEBUG)
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True)
def write_ep_file(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
"""
Generates and writes ep_obj's metadata next to the episode file, using the
episode's file name with the extension in _ep_nfo_extension.
ep_obj: TVEpisode object for which to create the metadata
Note that this method expects that _ep_data will return an ElementTree
object. If your _ep_data returns data in another format you'll need to
override this method.
"""
data = self._ep_data(ep_obj)
if not data:
return False
nfo_file_path = self.get_episode_file_path(ep_obj)
logger.log(u'Writing episode metadata file: %s' % nfo_file_path, logger.DEBUG)
return sg_helpers.write_file(nfo_file_path, data, xmltree=True, utf8=True)
def save_thumbnail(self, ep_obj):
# type: (sickbeard.tv.TVEpisode) -> bool
"""
Retrieves a thumbnail and saves it to the correct spot. This method should not need to
be overridden by implementing classes, changing get_episode_thumb_path and
_get_episode_thumb_url should suffice.
ep_obj: a TVEpisode object for which to generate a thumbnail
"""
file_path = self.get_episode_thumb_path(ep_obj)
if not file_path:
logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG)
return False
thumb_url = self._get_episode_thumb_url(ep_obj)
# if we can't find one then give up
if not thumb_url:
logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG)
return False
thumb_data = metadata_helpers.getShowImage(thumb_url, show_name=ep_obj.show_obj.name)
result = self._write_image(thumb_data, file_path)
if not result:
return False
for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
cur_ep_obj.hastbn = True
return True
def save_fanart(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
"""
Downloads a fanart image and saves it to the filename specified by fanart_name
inside the show's root folder.
show_obj: a TVShow object for which to download fanart
"""
# use the default fanart name
fanart_path = self.get_fanart_path(show_obj)
fanart_data = self._retrieve_show_image('fanart', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.FANART)
if not fanart_data:
logger.log(u"No fanart image was retrieved, unable to write fanart", logger.DEBUG)
return False
return self._write_image(fanart_data, fanart_path)
def save_poster(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
"""
Downloads a poster image and saves it to the filename specified by poster_name
inside the show's root folder.
show_obj: a TVShow object for which to download a poster
"""
# use the default poster name
poster_path = self.get_poster_path(show_obj)
poster_data = self._retrieve_show_image('poster', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.POSTER)
if not poster_data:
logger.log(u"No show poster image was retrieved, unable to write poster", logger.DEBUG)
return False
return self._write_image(poster_data, poster_path)
def save_banner(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
"""
Downloads a banner image and saves it to the filename specified by banner_name
inside the show's root folder.
show_obj: a TVShow object for which to download a banner
"""
# use the default banner name
banner_path = self.get_banner_path(show_obj)
banner_data = self._retrieve_show_image('banner', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.BANNER)
if not banner_data:
logger.log(u"No show banner image was retrieved, unable to write banner", logger.DEBUG)
return False
return self._write_image(banner_data, banner_path)
def save_season_posters(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> bool
"""
Saves all season posters to disk for the given show.
show_obj: a TVShow object for which to save the season thumbs
Cycles through all seasons and saves the season posters if possible.
"""
season_dict = self._season_image_dict(show_obj, season, 'seasons')
result = []
# Returns a nested dictionary of season art with the season
# number as primary key. It's really overkill but gives the option
# to present to the user via the UI to choose from later.
for cur_season in season_dict:
cur_season_art = season_dict[cur_season]
if 0 == len(cur_season_art):
continue
# Just grab whatever's there for now
art_id, season_url = cur_season_art.popitem()
season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)
if not season_poster_file_path:
logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season',
logger.DEBUG)
continue
season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season poster data available, skipping this season', logger.DEBUG)
continue
result = result + [self._write_image(season_data, season_poster_file_path)]
if result:
return all(result)
return False
def save_season_banners(self, show_obj, season):
# type: (sickbeard.tv.TVShow, int) -> bool
"""
Saves all season banners to disk for the given show.
show_obj: a TVShow object for which to save the season thumbs
Cycles through all seasons and saves the season banners if possible.
"""
season_dict = self._season_image_dict(show_obj, season, 'seasonwides')
result = []
# Returns a nested dictionary of season art with the season
# number as primary key. It's really overkill but gives the option
# to present to the user via the UI to choose from later.
for cur_season in season_dict:
cur_season_art = season_dict[cur_season]
if 0 == len(cur_season_art):
continue
# Just grab whatever's there for now
art_id, season_url = cur_season_art.popitem()
season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)
if not season_banner_file_path:
logger.log(u'Path for season ' + str(cur_season) + ' came back blank, skipping this season',
logger.DEBUG)
continue
season_data = metadata_helpers.getShowImage(season_url, show_name=show_obj.name)
if not season_data:
logger.log(u'No season banner data available, skipping this season', logger.DEBUG)
continue
result = result + [self._write_image(season_data, season_banner_file_path)]
if result:
return all(result)
return False
def save_season_all_poster(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
# use the default season all poster name
poster_path = self.get_season_all_poster_path(show_obj)
poster_data = self._retrieve_show_image('poster', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.POSTER)
if not poster_data:
logger.log(u"No show poster image was retrieved, unable to write season all poster", logger.DEBUG)
return False
return self._write_image(poster_data, poster_path)
def save_season_all_banner(self, show_obj, which=None):
# type: (sickbeard.tv.TVShow, Optional[AnyStr]) -> bool
# use the default season all banner name
banner_path = self.get_season_all_banner_path(show_obj)
banner_data = self._retrieve_show_image('banner', show_obj, which,
img_cache_type=sickbeard.image_cache.ImageCache.BANNER)
if not banner_data:
logger.log(u"No show banner image was retrieved, unable to write season all banner", logger.DEBUG)
return False
return self._write_image(banner_data, banner_path)
@staticmethod
def _write_image(image_data, image_path, force=False):
# type: (bytes, AnyStr, bool) -> bool
"""
Saves the data in image_data to the location image_path. Returns True/False
to represent success or failure.
image_data: binary image data to write to file
image_path: file location to save the image to
"""
# don't bother overwriting it
if not force and ek.ek(os.path.isfile, image_path):
logger.log(u"Image already exists, not downloading", logger.DEBUG)
return False
if not image_data:
logger.log(u"Unable to retrieve image, skipping", logger.WARNING)
return False
image_dir = ek.ek(os.path.dirname, image_path)
try:
if not ek.ek(os.path.isdir, image_dir):
logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG)
ek.ek(os.makedirs, image_dir)
sg_helpers.chmod_as_parent(image_dir)
outFile = ek.ek(open, image_path, 'wb')
outFile.write(image_data)
outFile.close()
sg_helpers.chmod_as_parent(image_path)
except IOError as e:
logger.log(
u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
logger.ERROR)
return False
return True
@staticmethod
def gen_show_infos_dict(show_obj):
# type: (TVShow) -> ShowInfosDict
show_infos = ShowInfosDict()
def _get_show_info(tv_id):
try:
show_lang = show_obj.lang
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
tvinfo_config = sickbeard.TVInfoAPI(tv_id).api_params.copy()
tvinfo_config['fanart'] = True
tvinfo_config['posters'] = True
tvinfo_config['banners'] = True
tvinfo_config['dvdorder'] = 0 != show_obj.dvdorder
if show_lang and not 'en' == show_lang:
tvinfo_config['language'] = show_lang
t = sickbeard.TVInfoAPI(tv_id).setup(**tvinfo_config)
return t.get_show((show_obj.ids[tv_id]['id'], show_obj.prodid)[tv_src == show_obj.tvid],
load_episodes=False, banners=False, posters=False, fanart=True)
except (BaseTVinfoError, IOError) as e:
logger.log(u"Unable to look up show on " + sickbeard.TVInfoAPI(
tv_id).name + ", not downloading images: " + ex(e), logger.WARNING)
# todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB
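# The show's own TV info source is fetched eagerly in the loop below; every
# other source is stored as the bare function so that ShowInfosDict resolves
# it lazily, i.e. the network is only hit if that key is actually read.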
for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list_keys(sickbeard.TVInfoAPI().search_sources) +
[TVINFO_TMDB])):
if tv_src != show_obj.tvid and not show_obj.ids.get(tv_src, {}).get('id'):
continue
if tv_src == show_obj.tvid:
show_infos[tv_src] = _get_show_info(tv_src)
else:
show_infos[tv_src] = _get_show_info
return show_infos
def _retrieve_image_urls(self, show_obj, image_type, show_infos):
# type: (TVShow, AnyStr, ShowInfosDict) -> Generator
image_urls, alt_tvdb_urls, fanart_fetched, de_dupe, show_lang = [], [], False, set(), show_obj.lang
def build_url(s_o, image_mode):
_urls = [[], []]
_url = s_o[image_mode]
if _url and _url.startswith('http'):
if 'poster' == image_mode:
_url = re.sub('posters', '_cache/posters', _url)
elif 'banner' == image_mode:
_url = re.sub('graphical', '_cache/graphical', _url)
_urls[0].append(_url)
try:
alt_url = '%swww.%s%s' % re.findall(
r'(https?://)(?:artworks\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(?:_t)(.*)', _url)[0][0:3]
if alt_url not in _urls[0]:
_urls[1].append(alt_url)
except (IndexError, Exception):
try:
alt_url = '%sartworks.%s_t%s' % re.findall(
r'(https?://)(?:www\.)?(thetvdb\.[^/]+/banners/[^\d]+[^.]+)(.*)', _url)[0][0:3]
if alt_url not in _urls[0]:
_urls[1].append(alt_url)
except (IndexError, Exception):
pass
return _urls
def _get_fanart_tv():
return [_de_dupe((f_item[2], (f_item[2], f_item[2]))[image_type in ('poster', 'banner')])
for f_item in self._fanart_urls_from_show(show_obj, image_type, show_lang) or []]
def _de_dupe(images_list):
# type:(Union[List[AnyStr], AnyStr]) -> Optional[Union[List[AnyStr], AnyStr]]
if not isinstance(images_list, list):
return_list = False
temp_list = [images_list]
else:
return_list = True
temp_list = images_list
images_list = [i for i in temp_list if i not in de_dupe]
[de_dupe.add(_i) for _i in images_list]
if not return_list:
if images_list:
return images_list[0]
return None
return images_list
if image_type.startswith('fanart'):
for r in _get_fanart_tv():
yield r
for tv_src in show_infos:
if not self._valid_show(show_infos[tv_src], show_obj):
continue
if 'poster_thumb' == image_type:
if None is not getattr(show_infos[tv_src], image_type, None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], image_type)
elif None is not getattr(show_infos[tv_src], 'poster', None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], 'poster')
image_urls, alt_tvdb_urls = _de_dupe(image_urls), _de_dupe(alt_tvdb_urls)
for item in image_urls + alt_tvdb_urls:
yield item
elif 'banner_thumb' == image_type:
if None is not getattr(show_infos[tv_src], image_type, None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], image_type)
elif None is not getattr(show_infos[tv_src], 'banner', None):
image_urls, alt_tvdb_urls = build_url(show_infos[tv_src], 'banner')
image_urls, alt_tvdb_urls = _de_dupe(image_urls), _de_dupe(alt_tvdb_urls)
for item in image_urls + alt_tvdb_urls:
yield item
else:
if None is not getattr(show_infos[tv_src], image_type, None):
image_url = show_infos[tv_src][image_type]
if image_type in ('poster', 'banner'):
if None is not getattr(show_infos[tv_src], '%s_thumb' % image_type, None):
thumb_url = show_infos[tv_src]['%s_thumb' % image_type]
else:
thumb_url = image_url
else:
thumb_url = None
if image_url:
r = _de_dupe(((image_url, thumb_url), image_url)[None is thumb_url])
if r:
yield r
# check extra provided images in '_banners' key
if None is not getattr(show_infos[tv_src], '_banners', None) and \
isinstance(show_infos[tv_src]['_banners'].get(image_type, None), (list, dict)):
for res, value in iteritems(show_infos[tv_src]['_banners'][image_type]):
for item in itervalues(value):
thumb = item['thumbnailpath']
if not thumb:
thumb = item['bannerpath']
r = _de_dupe((item['bannerpath'], (item['bannerpath'], thumb))[
image_type in ('poster', 'banner')])
if r:
yield r
# extra images via images property
tvinfo_type = map_image_types.get(image_type)
tvinfo_size = (TVInfoImageSize.original, TVInfoImageSize.medium)['_thumb' in image_type]
if tvinfo_type and getattr(show_infos[tv_src], 'images', None) and \
show_infos[tv_src].images.get(tvinfo_type):
for img in show_infos[tv_src].images[tvinfo_type]: # type: TVInfoImage
for img_size, img_url in iteritems(img.sizes):
if tvinfo_size == img_size:
img_url = _de_dupe(img_url)
if not img_url:
continue
if image_type in ('poster', 'banner'):
thumb_url = img.sizes.get(TVInfoImageSize.medium, img_url)
if thumb_url:
thumb_url = _de_dupe(thumb_url)
if not thumb_url:
thumb_url = img_url
yield (img_url, thumb_url)
elif img_url:
yield img_url
if not image_type.startswith('fanart'):
for r in _get_fanart_tv():
yield r
def _retrieve_show_image(self,
image_type, # type: AnyStr
show_obj, # type: sickbeard.tv.TVShow
which=None, # type: int
return_links=False, # type: bool
show_infos=None, # type: ShowInfosDict
img_cache_type=None # type: int
):
# type: (...) -> Optional[Union[bytes, List[AnyStr]]]
"""
Gets an image URL from theTVDB.com, fanart.tv and TMDB.com, downloads it and returns the data.
If type is fanart, multiple image src urls are returned instead of a single data image.
image_type: type of image to retrieve (currently supported: fanart, poster, banner, poster_thumb, banner_thumb)
show_obj: a TVShow object to use when searching for the image
which: optional, a specific numbered poster to look for
Returns: the binary image data if available, or else None
"""
if not show_infos:
show_infos = self.gen_show_infos_dict(show_obj)
if 'fanart_all' == image_type:
return_links = True
image_type = 'fanart'
if image_type not in ('poster', 'banner', 'fanart', 'poster_thumb', 'banner_thumb'):
logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickbeard.TVInfoAPI(
show_obj.tvid).name + " object", logger.ERROR)
return
image_urls = self._retrieve_image_urls(show_obj, image_type, show_infos)
if image_urls:
if return_links:
return image_urls
else:
img_data = None
image_cache = sickbeard.image_cache.ImageCache()
for image_url in image_urls or []:
if image_type in ('poster', 'banner'):
if isinstance(image_url, tuple):
image_url = image_url[0]
img_data = metadata_helpers.getShowImage(image_url, which, show_obj.name)
if img_cache_type and img_cache_type != image_cache.which_type(img_data, is_binary=True):
img_data = None
continue
if None is not img_data:
break
if None is not img_data:
return img_data
def _season_image_dict(self, show_obj, season, image_type):
# type: (sickbeard.tv.TVShow, int, AnyStr) -> Dict[int, Dict[int, AnyStr]]
"""
image_type : Type of image to fetch, 'seasons' or 'seasonwides'
image_type type : String
Should return a dict like:
    result = {<season number>: {1: '<url 1>', 2: '<url 2>', ...}}
"""
result = {}
try:
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
tvinfo_config = sickbeard.TVInfoAPI(show_obj.tvid).api_params.copy()
tvinfo_config[image_type] = True
tvinfo_config['dvdorder'] = 0 != show_obj.dvdorder
if 'en' != getattr(show_obj, 'lang', None):
tvinfo_config['language'] = show_obj.lang
t = sickbeard.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config)
tvinfo_obj_show = t[show_obj.prodid]
except (BaseTVinfoError, IOError) as e:
logger.log(u'Unable to look up show on ' + sickbeard.TVInfoAPI(
show_obj.tvid).name + ', not downloading images: ' + ex(e), logger.WARNING)
return result
if not self._valid_show(tvinfo_obj_show, show_obj):
return result
season_images = getattr(tvinfo_obj_show, 'banners', {}).get(
('season', 'seasonwide')['seasonwides' == image_type], {}).get(season, {})
for image_id in season_images:
if season not in result:
result[season] = {}
result[season][image_id] = season_images[image_id]['bannerpath']
return result
def retrieveShowMetadata(self, folder):
# type: (AnyStr) -> Union[Tuple[int, int, AnyStr], Tuple[None, None, None]]
"""
Used only when mass adding Existing Shows,
using previously generated Show metadata to reduce the need to query TVDB.
"""
from sickbeard.indexers.indexer_config import TVINFO_TVDB
empty_return = (None, None, None)
metadata_path = ek.ek(os.path.join, folder, self._show_metadata_filename)
if not ek.ek(os.path.isdir, folder) or not ek.ek(os.path.isfile, metadata_path):
logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG)
return empty_return
logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG)
try:
with ek.ek(io.open, metadata_path, 'r', encoding='utf8') as xmlFileObj:
showXML = etree.ElementTree(file=xmlFileObj)
if None is showXML.findtext('title') \
or all(None is _f for _f in (showXML.find('//uniqueid[@type]'),
showXML.findtext('tvdbid'),
showXML.findtext('id'),
showXML.findtext('indexer'))):
logger.log(u"Invalid info in tvshow.nfo (missing name or id):"
+ str(showXML.findtext('title')) + ' '
+ str(showXML.findtext('indexer')) + ' '
+ str(showXML.findtext('tvdbid')) + ' '
+ str(showXML.findtext('id')))
return empty_return
name = showXML.findtext('title')
try:
tvid = int(showXML.findtext('indexer'))
except (BaseException, Exception):
tvid = None
# handle v2 format of .nfo file
default_source = showXML.find('//uniqueid[@default="true"]')
if None is not default_source:
use_tvid = default_source.attrib.get('type') or tvid
if isinstance(use_tvid, string_types):
use_tvid = {sickbeard.TVInfoAPI(x).config['slug']: x
for x, _ in iteritems(sickbeard.TVInfoAPI().all_sources)}.get(use_tvid)
prodid = sg_helpers.try_int(default_source.text, None)
if use_tvid and None is not prodid:
return use_tvid, prodid, name
prodid = showXML.find('//uniqueid[@type="tvdb"]')
if None is not prodid:
prodid = int(prodid.text)
tvid = TVINFO_TVDB
elif None is not showXML.findtext('tvdbid'):
prodid = int(showXML.findtext('tvdbid'))
tvid = TVINFO_TVDB
elif None is not showXML.findtext('id'):
prodid = int(showXML.findtext('id'))
try:
tvid = TVINFO_TVDB if [s for s in showXML.findall('.//*')
if s.text and -1 != s.text.find('thetvdb.com')] else tvid
except (BaseException, Exception):
pass
else:
logger.log(u"Empty <id> or <tvdbid> field in NFO, unable to find a ID", logger.WARNING)
return empty_return
if None is prodid:
logger.log(u"Invalid Show ID (%s), not using metadata file" % prodid, logger.WARNING)
return empty_return
except (BaseException, Exception) as e:
logger.log(
u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
logger.WARNING)
return empty_return
return tvid, prodid, name
def _fanart_urls_from_show(self, show_obj, image_type='banner', lang='en', thumb=False):
# type: (sickbeard.tv.TVShow, AnyStr, AnyStr, bool) -> Optional[List[List[Union[int, AnyStr]]]]
try:
tvdb_id = show_obj.ids.get(indexer_config.TVINFO_TVDB, {}).get('id', None)
if tvdb_id:
return self._fanart_urls(tvdb_id, image_type, lang, thumb)
except (BaseException, Exception):
pass
logger.log(u'Could not find any %s images on Fanart.tv for %s' % (image_type, show_obj.name), logger.DEBUG)
@staticmethod
def _fanart_urls(tvdb_id, image_type='banner', lang='en', thumb=False):
# type: (int, AnyStr, AnyStr, bool) -> Optional[List[List[Union[int, AnyStr]]]]
types = {'poster': fanart.TYPE.TV.POSTER,
'banner': fanart.TYPE.TV.BANNER,
'fanart': fanart.TYPE.TV.BACKGROUND,
'poster_thumb': fanart.TYPE.TV.POSTER,
'banner_thumb': fanart.TYPE.TV.BANNER}
try:
if tvdb_id:
request = fanartRequest(apikey=sickbeard.FANART_API_KEY, tvdb_id=tvdb_id, types=types[image_type])
resp = request.response()
itemlist = []
dedupe = []
for art in filter_iter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]),
# remove "[0:2]" ... to strictly use only data where "en" is at source
resp[types[image_type]]): # type: dict
try:
url = (art['url'], art['url'].replace('/fanart/', '/preview/'))[thumb]
if url not in dedupe:
dedupe += [url]
itemlist += [
[int(art['id']), int(art['likes']), url]
]
except (BaseException, Exception):
continue
itemlist.sort(key=lambda a: (a[1], a[0]), reverse=True)
return itemlist
except (BaseException, Exception):
raise
def retrieve_show_image(self, image_type, show_obj, which=None, return_links=False, show_infos=None):
# type: (AnyStr, sickbeard.tv.TVShow, Optional[int], bool, ShowInfosDict) -> Optional[bytes]
return self._retrieve_show_image(image_type=image_type, show_obj=show_obj, which=which,
return_links=return_links, show_infos=show_infos)
def write_image(self, image_data, image_path, force=False):
# type: (bytes, AnyStr, bool) -> bool
return self._write_image(image_data=image_data, image_path=image_path, force=force)
| SickGear/SickGear | sickbeard/metadata/generic.py | Python | gpl-3.0 | 53,307 |
#encoding=utf-8
from models import Soci, SalesInvoice, PurchaseInvoice, Client, Provider, PeriodClose, period, periodTaxes
from django.utils.translation import ugettext_lazy as _
from django import forms
from datetime import *
from django.http import HttpResponseRedirect
from decimal import Decimal
from localflavor.es.forms import *
from django.db.models import F
class PeriodManager(object):
@staticmethod
def get_opened_period(user):
#Get current user soci record
obj_Soci = Soci.objects.filter(user=user)
if obj_Soci.count()>0:
#Get extradays
nExtraDays = obj_Soci[0].extra_days if obj_Soci else 0
#Return queryset
qs_Period = period.objects.filter( first_day__lte=datetime.now(),
date_close__gte=datetime.now() - timedelta(days=nExtraDays)
)
if qs_Period.count() > 1:
return period.objects.filter(pk=qs_Period[0].pk)
else:
return qs_Period
return None
@staticmethod
def filterbydefault(request, instance, entity, extra_context):
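# Default the changelist to the currently opened period: unless the user is
# already browsing this list (or passed a timeframe), inject
# period__id__exact into the querystring and redirect.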
referer = request.META.get('HTTP_REFERER', '')
showall = request.META['PATH_INFO'] in referer and not request.GET.has_key('timeframe')
if not showall and not request.GET.has_key('period__id__exact'):
qs_Period = PeriodManager.get_opened_period( request.user )
if qs_Period:
period_value = qs_Period[0].id if qs_Period.count() == 1 else 0
q = request.GET.copy()
q['period__id__exact'] = period_value
request.GET = q
request.META['QUERY_STRING'] = request.GET.urlencode()
return HttpResponseRedirect( request.path + "?" + request.GET.urlencode() )
return super(entity,instance).changelist_view(request, extra_context=extra_context)
class SociAdminForm(forms.ModelForm):
model = Soci
class Meta:
localized_fields = ('preTAX', )
class SalesInvoiceForm(forms.ModelForm):
model = SalesInvoice
def __init__(self, *args, **kwargs):
#Initialize to current period only if new (self.instance.user = None)
super(SalesInvoiceForm, self).__init__(*args, **kwargs)
qs_Period = PeriodManager.get_opened_period ( self.request.user )
if qs_Period and not self.instance.user:
self.initial['period'] = qs_Period[0].pk
def clean_date(self):
cleaned_data = self.cleaned_data
date = cleaned_data.get('date')
period = cleaned_data.get('period')
if date > period.date_close:
raise forms.ValidationError(_(u"La data ha de ser menor que el dia final del periode"))
return date
def clean(self):
cleaned_data = self.cleaned_data
#New or Edit
        if self.instance.pk:
            pk = self.instance.pk
        else:
            pk = 0
#index unique = user, period, numfac + IVA
user = cleaned_data.get('user')
period = cleaned_data.get('period')
num = cleaned_data.get('num')
iva = cleaned_data.get('percentInvoicedVAT')
if SalesInvoice.objects.filter( user=user,
period=period,
num=num, percentInvoicedVAT=iva).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix una factura amb aquest número"))
return cleaned_data
class Meta:
localized_fields = ('value', 'invoicedVAT', 'assignedVAT', 'total')
class PurchaseInvoiceForm(forms.ModelForm):
model = PurchaseInvoice
def __init__(self, *args, **kwargs):
        #Initialize to the current period only when creating a new record (self.instance.user is None)
super(PurchaseInvoiceForm, self).__init__(*args, **kwargs)
qs_Period = PeriodManager.get_opened_period ( self.request.user )
if qs_Period and not self.instance.user:
self.initial['period'] = qs_Period[0].pk
def clean_date(self):
cleaned_data = self.cleaned_data
date = cleaned_data.get('date')
period = cleaned_data.get('period')
if date > period.date_close:
raise forms.ValidationError(_(u"La data ha de ser menor que el dia final del periode"))
return date
def clean(self):
cleaned_data = self.cleaned_data
        if self.instance.pk:
            pk = self.instance.pk
        else:
            pk = 0
user = self.cleaned_data.get('user')
period = cleaned_data.get('period')
num = cleaned_data.get('num')
iva = cleaned_data.get('percentExpencedVAT')
existing = PurchaseInvoice.objects.filter( user=user,
period=period,
num=num, percentExpencedVAT=iva).exclude(pk=pk)
if existing.count() > 0:
raise forms.ValidationError(_(u"Ja existeix una factura amb aquest número"))
return cleaned_data
class Meta:
localized_fields = ('value', )
class CardIDValidator():
def validate(self, CIF, oID):
if CIF == "" and oID == "":
raise forms.ValidationError(_(u"Has d'introduïr o CIF/NIF/NIE o Altres identificadors. Un dels dos."))
if CIF != "" and oID != "":
raise forms.ValidationError(_(u"Has d'introduïr o CIF/NIF/NIE o Altres identificadors. No tots dos."))
if CIF != "":
myValidator = ESIdentityCardNumberField()
myValidator.clean(CIF)
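# Illustrative usage sketch (not part of the original module): exactly one of
# the two identifiers must be supplied, and a Spanish CIF/NIF/NIE is further
# validated via localflavor's ESIdentityCardNumberField. The values below are
# hypothetical.
#
#     CardIDValidator().validate("B12345674", "")  # passes if the CIF is valid
#     CardIDValidator().validate("", "")           # raises ValidationError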
class ClientForm(forms.ModelForm):
def clean_CIF(self):
cleaned_data = self.cleaned_data
CIF = cleaned_data.get('CIF')
return CIF.replace(" ","").replace("-","")
def clean(self):
cleaned_data = self.cleaned_data
CIF = cleaned_data.get('CIF')
oID = cleaned_data.get('otherCIF')
validator = CardIDValidator()
validator.validate( CIF, oID)
        if self.instance.pk:
            pk = self.instance.pk
        else:
            pk = 0
if Client.objects.filter( user=self.request.user, name=cleaned_data.get('name')).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix un Proveïdor amb aquest Nom Fiscal"))
if CIF and Client.objects.filter( user=self.request.user, CIF=CIF).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix un Client amb aquest identificador"))
if oID and Client.objects.filter( user=self.request.user, otherCIF=oID).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix un Client amb aquest identificador"))
return cleaned_data
class Meta:
model = Client
class ProviderForm(forms.ModelForm):
def clean_CIF(self):
cleaned_data = self.cleaned_data
CIF = cleaned_data.get('CIF')
return CIF.replace(" ","").replace("-","")
def clean(self):
cleaned_data = self.cleaned_data
CIF = cleaned_data.get('CIF')
oID = cleaned_data.get('otherCIF')
validator = CardIDValidator()
validator.validate(CIF, oID)
        if self.instance.pk:
            pk = self.instance.pk
        else:
            pk = 0
if Provider.objects.filter( user=self.request.user, name=cleaned_data.get('name')).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix un Proveïdor amb aquest Nom Fiscal"))
if CIF and Provider.objects.filter( user=self.request.user, CIF=CIF).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix un Proveïdor amb aquest identificador"))
if oID and Provider.objects.filter( user=self.request.user, otherCIF=oID).exclude(pk=pk).count() > 0:
raise forms.ValidationError(_(u"Ja existeix un Proveïdor amb aquest identificador"))
return cleaned_data
class Meta:
model = Provider
class PeriodCloseForm(forms.ModelForm):
VAT_1 = forms.DecimalField(label=_(u"IVA Facturat - Despeses (€)"), localize=True, required=False)
VAT_2 = forms.DecimalField(label=_(u"IVA Assignat - Despeses (€)"), localize=True, required=False)
Savings = forms.DecimalField(label=_(u"IVA Facturat - Assignat (€)"), localize=True, required=False)
TotalVAT = forms.DecimalField(label=_(u"Total IVA (€)"), localize=True, required=False)
TotalIRPF = forms.DecimalField(label=_(u"Total IRPF (€)"), localize=True, required=False)
subTotalEuro = forms.DecimalField(label=_(u"Total Quota i Aportació (€)"), localize=True, required=False)
subTotalEco = forms.DecimalField(label=_(u"Total Quota i Aportació (ECOS)"), localize=True, required=False)
TotalEuro = forms.DecimalField(label=_(u"TOTAL A ABONAR (€)"), help_text = _(u"Total a abonar trimestre (IVA, IRPF, Quota i Aportació) €"), localize=True, required=False)
TotalEco = forms.DecimalField(label=_(u"TOTAL A ABONAR (ECOS)"), help_text = _(u"Total a abonar trimestre (IVA, IRPF, Quota i Aportació) ECOS"), localize=True, required=False)
def __init__(self, *args, **kwargs):
super(PeriodCloseForm, self).__init__(*args, **kwargs)
#Recalculate values if editing
user = self.instance.user
if user:
#PERIOD
qs_Sales = SalesInvoice.objects.filter(period=self.instance.period, user=user)
sales_total = sales_invoicedVAT = sales_assignedVAT = sales_totalVAT = Decimal('0.00')
for item in qs_Sales.all():
sales_total += item.value
sales_invoicedVAT += item.invoicedVAT()
sales_assignedVAT += item.assignedVAT()
sales_totalVAT += item.total()
self.initial['Sales_total'] = Decimal ( "%.2f" % sales_total )
self.initial['Sales_invoicedVAT'] = Decimal ( "%.2f" % sales_invoicedVAT )
self.initial['Sales_assignedVAT'] = Decimal ( "%.2f" % sales_assignedVAT )
self.initial['Sales_totalVAT'] = Decimal ( "%.2f" % sales_totalVAT )
qs_Purchase = PurchaseInvoice.objects.filter(period=self.instance.period, user=self.instance.user)
purchases_total = purchases_expencedVAT = purchases_IRPFRetention = purchases_totalVAT = Decimal('0.00')
for item in qs_Purchase.all():
purchases_total += item.value
purchases_expencedVAT += item.expencedVAT()
purchases_IRPFRetention += item.IRPFRetention()
purchases_totalVAT += item.total()
self.initial['Purchases_total'] = Decimal ( "%.2f" % purchases_total )
self.initial['Purchases_expencedVAT'] = Decimal ( "%.2f" % purchases_expencedVAT )
self.initial['Purchases_IRPFRetention'] = Decimal ( "%.2f" % purchases_IRPFRetention )
self.initial['Purchases_totalVAT'] = Decimal ( "%.2f" % purchases_totalVAT )
#VATS
totalVAT1 = Decimal ( "%.2f" % (sales_invoicedVAT - purchases_expencedVAT) )
if totalVAT1 < 0:
totalVAT1 = 0
totalVAT2 = Decimal ( "%.2f" % (sales_assignedVAT - purchases_expencedVAT) )
if totalVAT2 < 0:
totalVAT2 = 0
self.initial['VAT_1'] = totalVAT1
self.initial['VAT_2'] = totalVAT2
#QUOTA
qs_Tax = periodTaxes.objects.filter(min_base__lte=sales_total, max_base__gte=sales_total)
value = Decimal('0.00')
if qs_Tax.count() == 1:
value = Decimal ( "%.2f" % qs_Tax[0].taxId )
else:
value = 'Consultar'
self.initial['periodTAX'] = value
def clean_CESnumber(self):
cleaned_data = self.cleaned_data
ptax= cleaned_data.get('periodTAXeco')
donation = cleaned_data.get('donation_eco')
ces = cleaned_data.get('CESnumber')
mustInputCes = Decimal ( ptax ) > 0 or Decimal ( donation ) > 0
if mustInputCes and ces == "":
raise forms.ValidationError(_(u"Tens que especificar el teu compte CES"))
return ces
class Meta:
model = PeriodClose
localized_fields = ('Sales_total', 'Sales_invoicedVAT', 'Sales_assignedVAT', 'Sales_totalVAT',
'Purchases_total', 'Purchases_expencedVAT', 'Purchases_IRPFRetention', 'Purchases_totalVAT',
'Savings', 'TotalVAT', 'TotalIRPF', 'Savings',
'Savings_donation', 'periodTAX', 'preTAX', 'periodTAXeuro', 'periodTAXeco', 'donation_euro', 'donation_eco',
'subTotalEuro', 'subTotalEco', 'TotalEuro', 'TotalEco', 'payment_entity')
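# Illustrative arithmetic sketch (an assumption mirroring PeriodCloseForm.__init__
# above, not part of the original module): both VAT settlements are floored at
# zero, so invoiced VAT of 100.00 against expensed VAT of 120.00 yields 0,
# never a negative balance.
#
#     total = Decimal("%.2f" % (Decimal('100.00') - Decimal('120.00')))  # -20.00
#     if total < 0:
#         total = 0                                                      # -> 0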
| aleph1888/calaCOOP | Invoices/forms.py | Python | gpl-3.0 | 11,060 |
import os
from crpropa import *
import numpy as np
class Benchmark(object):
""" Benchmark scenario
Specs: https://www.auger.unam.mx/AugerWiki/BenchmarkScenario
PA GAP note: GAP-2012-138
"""
def __init__(self):
""" Initialize required objects and parameters on default values
All parameters can be overridden
"""
# Containers
self.m = ModuleList()
self.bField = None
self.obs = Observer()
self.source = Source()
self.outputFileName = 'default_output.txt'
# Box proporties
self.boxOrigin = Vector3d(54, 54, 54)*Mpc
self.boxSize = 132*Mpc
self.gridFile = os.path.expanduser(
'~/.virtualenvs/crpropa/share/crpropa/bench_54-186Mpc_440bins.raw')
self.Brms = 1.
self.Bnorm = 1.
self.sources_file = 'BenchmarkSources.txt'
self.composition = None
# Observer size and position
self.obsPosition = Vector3d(118.34, 117.69, 119.2)*Mpc
self.obsSize = 1.*Mpc
        # Propagation properties
self.minEnergy = 1.*EeV
self.maxTrajectory = redshift2ComovingDistance(2)
# General source properties
self.sourceMinEnergy = 1.*EeV
self.sourceMaxEnergy = 26*1000.*EeV
self.sourceSpectralIndex = -1.
# Random seeds
self.turbulenceSeed = 2308
self.generalSeed = 185652056
# Candidates
self.NEvents = 5000
self.A = 1
self.Z = 1
def init_bField(self):
""" Initialize magnetic field
"""
# modulation grid
mgrid = ScalarGrid( self.boxOrigin, 440, self.boxSize / 440 )
loadGrid(mgrid, self.gridFile, self.Bnorm)
# turbulent vector grid
boxSpacing = 13.2*Mpc/440
vgrid = VectorGrid(self.boxOrigin, 440, boxSpacing)
initTurbulence(vgrid, self.Brms, 2.*boxSpacing, 2.2*Mpc, -11./3., self.turbulenceSeed)
# total magnetic field
self.bField = ModulatedMagneticFieldGrid(vgrid, mgrid)
def init_observer(self):
""" Insert observer(s)
"""
self.obs.add( ObserverSmallSphere( self.obsPosition, self.obsSize ) )
# Generally candidate is not deactivated on detection
self.obs.setDeactivateOnDetection( False )
out = TextOutput( self.outputFileName, Output.Event3D )
self.obs.onDetection( out )
def add_composition( self ):
""" Composition table
"""
composition_table = [
(1, 1, 92000.),
(4, 2, 13000.),
(9, 4, 4.5),
(11, 5, 4.5),
(12, 6, 447.4),
(14, 7, 34.2),
(16, 8, 526.3),
(19, 9, 0.3),
(20, 10, 58.),
(23, 11, 3.2),
(24, 12, 108.),
(27, 13, 7.8),
(28, 14, 100.),
(32, 16, 13.1),
(40, 18, 2.2),
(40, 20, 6.5),
(45, 21, 0.97),
(48, 22, 0.97),
(51, 23, 0.97),
(52, 24, 1.5),
(55, 25, 1.1),
(56, 26, 97.)
]
self.composition = SourceComposition(self.sourceMinEnergy,
self.sourceMaxEnergy,
self.sourceSpectralIndex)
for A, Z, a in composition_table:
if Z > 2:
a *= 10 # Bisi's scaling factor
self.composition.add(nucleusId(A, Z), a)
def init_sources( self ):
""" Deploy CR sources and their proporties
"""
data = np.genfromtxt(self.sources_file, comments='#', delimiter=' ', dtype=np.float64)
sX, sY, sZ = data[:,0], data[:,1], data[:,2]
sourceList = SourceMultiplePositions()
for x, y, z in zip(sX, sY, sZ):
sourceList.add(Vector3d(x, y, z))
self.source.add(sourceList)
self.source.add(SourceIsotropicEmission())
if self.A and self.Z: # if inserting single type particle source
self.source.add(SourceParticleType(nucleusId(self.A, self.Z )))
self.source.add(SourcePowerLawSpectrum(self.sourceMinEnergy,
self.sourceMaxEnergy,
self.sourceSpectralIndex))
else:
self.add_composition()
self.source.add( self.composition )
def init_interactions(self):
""" Used interactions
"""
EBL = IRB_Gilmore12
self.m.add(PhotoPionProduction(CMB))
self.m.add(PhotoPionProduction(EBL))
self.m.add(PhotoDisintegration(CMB))
self.m.add(PhotoDisintegration(EBL))
self.m.add(NuclearDecay())
self.m.add(ElectronPairProduction(CMB))
self.m.add(ElectronPairProduction(EBL))
def init_moduleList(self):
""" Initialize moduleList
"""
self.m.add(DeflectionCK(self.bField, 1e-3, 10.*kpc, 10.*Mpc))
self.m.add(MinimumEnergy(self.minEnergy))
self.m.add(MaximumTrajectoryLength(self.maxTrajectory))
self.m.add(ReflectiveBox(self.boxOrigin, Vector3d(self.boxSize)))
self.m.add(self.obs)
def init(self):
""" Initialized everything before the start of simulation
"""
Random_seedThreads(self.generalSeed)
self.init_bField()
self.init_sources()
self.init_observer()
self.init_interactions()
self.init_moduleList()
def run(self):
""" Run the simulation
"""
self.m.setShowProgress(True)
self.m.run(self.source, self.NEvents, True)
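if __name__ == '__main__':
    # Hedged usage sketch, not part of the original scenario: drives the
    # Benchmark class above end to end. The overrides (event count, output
    # file name) are illustrative assumptions, not upstream defaults.
    bench = Benchmark()
    bench.NEvents = 1000                      # smaller run for a quick test
    bench.outputFileName = 'bench_test.txt'   # hypothetical output path
    bench.init()   # seeds RNGs, builds the field, sources, observer, modules
    bench.run()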
| adundovi/CRPropa3-scripts | scenarios/benchmark/BenchmarkClass.py | Python | gpl-3.0 | 5,796 |
'''
Live plots data received over serial
'''
import collections
import matplotlib.pyplot as plt
import serial
import serial.tools.list_ports as list_ports
import unittest
import threading
import atexit
import random
import enum
import time
DELIMETER = b","
ENDBYTE = b'\r'
STARTBYTE = b"\n"
class LivePlot:
'''
Thin wrapper over the default plot to provide an interface for live plotting
'''
def __init__(self, data_source,
*, title=None,
xlabel=None,
ylabel=None,
max_points=100):
self._data_source = data_source
self._max_points = max_points
self.x_data = collections.deque([None] * max_points, maxlen=max_points)
self.y_data = collections.deque([None] * max_points, maxlen=max_points)
self.z_data = collections.deque([None] * max_points, maxlen=max_points)
self.a_data = collections.deque([None] * max_points, maxlen=max_points)
###################################################################
# We make the implicit assumption that the data will be displayed #
# in a 1x1 ratio. #
###################################################################
self._figure = plt.figure()
self._axis_1 = self._figure.add_subplot(2, 2, 1)
self._axis_2 = self._figure.add_subplot(2, 2, 2)
self._axis_3 = self._figure.add_subplot(2, 2, 3)
self._axis_4 = self._figure.add_subplot(2, 2, 4)
self._axis_1.set_title("a")
self._axis_1.set_title("b")
self._axis_1.set_title("c")
self._axis_1.set_title("d")
if title: self._axis_1.set_title(title)
if xlabel: self._axis_1.set_xlabel(xlabel)
if ylabel: self._axis_1.set_ylabel(ylabel)
def add_data(self, x, y, z, a, *extra):
        '''
        adds the given x, y, z, and a data points to the data sets used by the plot.
        If adding the data would have the plot exceed max_points, the least
        recently added data point is removed
        '''
if extra: print(extra)
self.x_data.append(x)
self.y_data.append(y)
self.z_data.append(z)
self.a_data.append(a)
def _add_data_thread(self, shutdown_event):
try:
f = open('data' + str(random.randint(0, 100000)) + '.csv', "w+")
for new_data in self._data_source:
if shutdown_event.is_set():
f.close()
return
self.add_data(*new_data)
f.write(str(new_data).replace("(", "\n").replace(")", " ").strip(" "))
# time.sleep(1 / 60.0)
except (KeyboardInterrupt, SystemExit):
f.close()
return
def plot_forever(self):
'''
Continuously plots data from the data source
'''
#################################################################
# Since the target model for a live plot is a continuous #
# data source, we start a new thread to do that data collection #
#################################################################
shutdown_event = threading.Event()
data_col_thread = threading.Thread(target=self._add_data_thread, args=(shutdown_event,))
data_col_thread.start()
def _kill_thread():
'''
kills the data collection thread
'''
data_col_thread.do_run = False
data_col_thread.join()
atexit.register(_kill_thread)
plt.ion()
line_1, = self._axis_1.plot(list(self.x_data), list(self.x_data), 'r*')
line_2, = self._axis_2.plot(list(self.y_data), list(self.x_data), 'b-')
line_3, = self._axis_1.plot(list(self.z_data), list(self.x_data), 'r*')
line_4, = self._axis_2.plot(list(self.a_data), list(self.x_data), 'b-')
while True:
try:
line_1.set_ydata([a for a in list(self.x_data)])
self._axis_1.clear()
self._axis_1.plot(range(len(self.x_data)), list(self.x_data), 'r-')
line_2.set_ydata([a for a in list(self.y_data)])
self._axis_2.clear()
self._axis_2.plot(list(self.x_data), list(self.y_data), 'b-')
line_3.set_ydata([a for a in list(self.z_data)])
self._axis_3.clear()
self._axis_3.plot(list(self.x_data), list(self.z_data), 'g-')
line_4.set_ydata([a for a in list(self.a_data)])
self._axis_4.clear()
self._axis_4.plot(list(self.x_data), list(self.a_data), 'r-')
plt.pause(1.0 / 60.0)
except (KeyboardInterrupt, SystemExit, Exception) as e:
shutdown_event.set()
raise e
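# Illustrative sketch (an assumption, not part of the original module): the
# bounded deques used by LivePlot give O(1) rolling storage; once maxlen is
# reached, appending silently evicts the oldest sample.
#
#     d = collections.deque([None] * 3, maxlen=3)
#     d.append(1)
#     d.append(2)   # d is now deque([None, 1, 2], maxlen=3)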
def establish_serial(baud_rate=None, serial_path=None):
while not baud_rate:
try:
baud_rate = input("What is the baud rate: ")
baud_rate = int(baud_rate)
except (EOFError, ValueError):
baud_rate = None
print("Entered baud rate was not a number, please try again")
if not serial_path:
ports = list_ports.comports()
choices = {}
for i, p in enumerate(ports):
print(i, end='\t')
print(p)
choices[i] = p
choice = input("Which port: ")
serial_path = str(choices[int(choice)]).split(' ')[0]
return BeeConnection(serial_path, baud_rate)
class ParserStates(enum.Enum):
DATA_READY = "The data is ready to be taken"
WAITING_FOR_START = "Waiting for a start byte"
TAKING_NUMBER_AWAITING_SIGN = "Taking number, waiting for +/- info"
TAKING_NUMBER = "Taking number w/ no decimal"
TAKING_NUMBER_DECIMAL = "Taking number; encountered decimal pt."
TOOK_END_BYTE = "Encountered the end byte"
ERROR_STATE = "ERROR STATE"
class Parser:
def __init__(self, *, num_data=4, delimeter=DELIMETER, endbyte=ENDBYTE, startbyte=STARTBYTE):
'''
FSM data parser for serial data
'''
self.num_data = num_data
self.state = ParserStates.WAITING_FOR_START
self.data = collections.deque()
self.number_buffer = collections.deque()
self.delimeter = delimeter
self.endbyte = endbyte
self.startbyte = startbyte
self.numbers_taken = 0
def reset(self):
'''
Sets the state to WAITING_FOR_START and clears the data
'''
self.state = ParserStates.WAITING_FOR_START
self.data = collections.deque()
self.number_buffer = []
self.numbers_taken = 0
def _crunch_number_buffer(self):
'''
tries to add what is in the number buffer to the data set
sets the state to an ERROR_STATE if there is a failure
'''
try:
self.data.append(float(b''.join(self.number_buffer)))
self.number_buffer = []
except:
self.state = ParserStates.ERROR_STATE
def feed(self, char):
'''
Feeds a single char to the parser
TODO refactor out the repetitive parts of this code
'''
if len(char) != 1 and self.state != ParserStates.WAITING_FOR_START:
self.state = ParserStates.ERROR_STATE
return
decimal_point = b'.'
negative_sign = b'-'
number_chars = [b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'0', decimal_point, negative_sign]
if self.state == ParserStates.WAITING_FOR_START:
if char == self.startbyte:
self.state = ParserStates.TAKING_NUMBER_AWAITING_SIGN
else:
return
elif self.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN:
if char in number_chars:
if char == negative_sign:
self.number_buffer.append(char)
self.state = ParserStates.TAKING_NUMBER
elif char == decimal_point:
self.number_buffer.append(char)
self.state = ParserStates.TAKING_NUMBER_DECIMAL
else:
self.number_buffer.append(char)
self.state = ParserStates.TAKING_NUMBER
else:
self.state = ParserStates.ERROR_STATE
elif self.state == ParserStates.TAKING_NUMBER:
if char in number_chars:
self.number_buffer.append(char)
if char == decimal_point:
self.state = ParserStates.TAKING_NUMBER_DECIMAL
elif char == self.delimeter and not self.numbers_taken >= self.num_data:
self.numbers_taken += 1
self.state = ParserStates.TAKING_NUMBER_AWAITING_SIGN
self._crunch_number_buffer()
elif char == self.endbyte and self.numbers_taken + 1 == self.num_data:
self.state = ParserStates.DATA_READY
self._crunch_number_buffer()
else:
self.state = ParserStates.ERROR_STATE
elif self.state == ParserStates.TAKING_NUMBER_DECIMAL:
if char in number_chars and char != decimal_point:
self.number_buffer.append(char)
elif char == self.delimeter and not self.numbers_taken >= self.num_data:
self.numbers_taken += 1
self.state = ParserStates.TAKING_NUMBER_AWAITING_SIGN
self._crunch_number_buffer()
elif char == self.endbyte and self.numbers_taken + 1 == self.num_data:
self.state = ParserStates.DATA_READY
self._crunch_number_buffer()
else:
self.state = ParserStates.ERROR_STATE
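def _parser_example():
    '''
    Illustrative sketch, not part of the original module: feeds one framed
    record, b"\\n1.5,2,-3,4\\r", through the FSM one byte at a time and checks
    the parsed result against the default num_data of 4.
    '''
    p = Parser()
    frame = STARTBYTE + b'1.5' + DELIMETER + b'2' + DELIMETER + b'-3' + DELIMETER + b'4' + ENDBYTE
    for ch in frame:
        p.feed(bytes([ch]))
    assert p.state == ParserStates.DATA_READY
    assert list(p.data) == [1.5, 2.0, -3.0, 4.0]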
class BeeConnection:
'''
Iterator that represents a view of the data being sent over serial
by the exBee
'''
def __init__(self, serial_path, baud_rate, timeout=1):
'''
initializes serial connection. If initial connect fails, waits half
a second and tries again. If the connection still is not established
after 10 such additional attempts, raises ConnectionError
'''
self._connection = serial.Serial(serial_path, baud_rate, timeout=timeout)
attempts = 0
while not self._connection.isOpen():
if attempts == 10:
raise ConnectionError("Failed to connect to serial device")
attempts += 1
time.sleep(0.5)
self.raw_data = collections.deque()
self._parser = Parser(num_data=6)
def close(self):
'''
cleans up and closes the serial connection
'''
if self._connection.isOpen():
self._connection.flush()
self._connection.close()
if not self._connection.isOpen():
print("Serial port successfully closed")
else:
print("Something went wrong closing the connection")
def __exit__(self):
'''
closes connection when the object is used in a with block
'''
self.close()
def __del__(self):
'''
closes connection if the object is deleted
'''
self.close()
def __iter__(self):
while True:
try:
if self._connection.inWaiting():
new_data = self._connection.read()
for char in new_data:
char = bytes([char])
self._parser.feed(char)
if self._parser.state == ParserStates.ERROR_STATE:
self._parser.reset()
elif self._parser.state == ParserStates.DATA_READY:
yield tuple(self._parser.data)
self._parser.reset()
else:
pass
            except Exception:
                # PEP 479: raising StopIteration inside a generator becomes a
                # RuntimeError on Python 3.7+; returning ends iteration cleanly.
                return
if __name__ == '__main__':
conn = establish_serial(9600)
plot = LivePlot(conn)
plot.plot_forever()
###################################################################################
class TestParser(unittest.TestCase):
def test_parser_errors(self):
p = Parser()
assert p.state == ParserStates.WAITING_FOR_START
p.feed(STARTBYTE)
assert p.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN
p.feed(b'k')
assert p.state == ParserStates.ERROR_STATE
def test_parser_simple_full_trip(self):
p = Parser()
p.feed(STARTBYTE)
assert p.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN
p.feed(b"2")
assert p.state == ParserStates.TAKING_NUMBER
p.feed(DELIMETER)
assert p.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN
p.feed(b"2")
assert p.state == ParserStates.TAKING_NUMBER
p.feed(DELIMETER)
assert p.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN
p.feed(b"2")
assert p.state == ParserStates.TAKING_NUMBER
p.feed(DELIMETER)
assert p.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN
p.feed(b"2")
assert p.state == ParserStates.TAKING_NUMBER
p.feed(ENDBYTE)
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [2.0, 2.0, 2.0, 2.0]
def test_parser_wait_for_start(self):
p = Parser()
assert p.state == ParserStates.WAITING_FOR_START
p.feed(b'JUNK')
assert p.state == ParserStates.WAITING_FOR_START
p.feed(b'J')
assert p.state == ParserStates.WAITING_FOR_START
p.feed(b'RR')
assert p.state == ParserStates.WAITING_FOR_START
p.feed(DELIMETER)
assert p.state == ParserStates.WAITING_FOR_START
p.feed(ENDBYTE)
assert p.state == ParserStates.WAITING_FOR_START
p.feed(STARTBYTE)
assert p.state == ParserStates.TAKING_NUMBER_AWAITING_SIGN
p.feed(b'-')
assert p.state == ParserStates.TAKING_NUMBER
def test_parser_complex_trip(self):
p = Parser()
p.feed(STARTBYTE)
p.feed(b'2')
p.feed(b'9')
p.feed(b'.')
p.feed(b'1')
p.feed(DELIMETER)
p.feed(b'1')
p.feed(b'2')
p.feed(b'3')
p.feed(b'4')
p.feed(b'5')
p.feed(b'6')
p.feed(b'7')
p.feed(b'8')
p.feed(b'9')
p.feed(b'0')
p.feed(DELIMETER)
p.feed(b'3')
p.feed(b'.')
p.feed(DELIMETER)
p.feed(b'1')
p.feed(b'5')
p.feed(ENDBYTE)
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [29.1, 1234567890.0, 3.0, 15.0]
def test_parser_decimal_read(self):
p = Parser()
p.feed(STARTBYTE)
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'.')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
p.feed(DELIMETER)
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'.')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
p.feed(DELIMETER)
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'.')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
p.feed(DELIMETER)
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER
p.feed(b'.')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
assert p.state == ParserStates.TAKING_NUMBER_DECIMAL
p.feed(b'4')
p.feed(ENDBYTE)
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [44.444, 44.444, 44.444, 44.444]
p.feed(b's')
assert p.state == ParserStates.DATA_READY
p.reset()
assert p.state == ParserStates.WAITING_FOR_START
def test_round_trips(self):
p = Parser()
for char in STARTBYTE + \
b'32.21' + \
DELIMETER + \
b'11' + \
DELIMETER + \
b'32' + \
DELIMETER + \
b'111.2' + \
ENDBYTE:
p.feed(bytes([char]))
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [32.21, 11, 32, 111.2]
def test_negative_number_cases(self):
p = Parser()
for char in STARTBYTE + \
b'-32.21' + \
DELIMETER + \
b'-11' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'111.2' + \
ENDBYTE:
p.feed(bytes([char]))
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [-32.21, -11, -.32, 111.2]
def test_complex_more_numbers(self):
p = Parser(num_data=7)
for char in STARTBYTE + \
b'-32.21' + \
DELIMETER + \
b'-11' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'111.2' + \
ENDBYTE:
p.feed(bytes([char]))
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [-32.21, -11, -.32, -.32, -.32, -.32, 111.2]
def test_non_matching_data(self):
p = Parser(num_data=7)
for char in STARTBYTE + \
b'-32.21' + \
DELIMETER + \
b'-11' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
ENDBYTE:
p.feed(bytes([char]))
assert p.state != ParserStates.DATA_READY
p.reset()
p = Parser(num_data=7)
for char in STARTBYTE + \
b'-32.21' + \
DELIMETER + \
b'-11' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'-.32' + \
DELIMETER + \
b'111.2' + \
ENDBYTE:
p.feed(bytes([char]))
assert p.state == ParserStates.DATA_READY
assert list(p.data) == [-32.21, -11, -.32, -.32, -.32, -.32, 111.2]
| AIAANortheastern/NASA-SL-2017 | main/groundstation/groundstation.py | Python | gpl-2.0 | 19,621 |
from math import sin
from math import sqrt
from math import pi
from copy import deepcopy
from vector import *
class Structure(object):
    #Radians = True tells the program to use radians throughout trigonometric calculations
    #Debug = False tells the program not to print out information that can aid debugging
    #Filename = "Data" tells the program to write to a file called Data
def __init__(self, Radians = True, Debug = False, Filename = "Data"):
self.Radians = Radians
self.Debug = Debug
self.Filename = Filename
fp = open("POSCAR", "r")
self.Rawdata = []
x = 0
for i in fp:
if (x < 2):
x += 1
elif (x > 4):
break
else:
x +=1
self.Rawdata.append(i.split())
if (self.Debug == True):
self.PrintRawData()
self.DetermineConstants()
fp.close()
def PrintRawData(self):
print("Raw Data gathered")
for i in self.Rawdata:
print(i)
print("")
def DetermineConstants(self):
self.InitializeVectors()
self.InitializeAngles()
    #The temp vectors will be the only vectors modified at runtime. The other
    #vectors are treated as constants, although Python does not allow objects
    #to be declared as such.
def InitializeVectors(self):
temp_comp = [0.0, 0.0, 0.0]
for i in range(0, 3):
temp_comp[i] = (float(self.Rawdata[0][i]))
self.vector_a = Vector(components = temp_comp)
temp_comp = [0.0, 0.0, 0.0]
for i in range(0, 3):
temp_comp[i] = float(self.Rawdata[1][i])
self.vector_b = Vector(components = temp_comp)
temp_comp = [0.0, 0.0, 0.0]
for i in range(0, 3):
temp_comp[i] = float(self.Rawdata[2][i])
self.vector_c = Vector(components = temp_comp)
if (self.Debug == True):
print("Vector a = " + str(self.vector_a.GetVectorComponents()))
print("Vector b = " + str(self.vector_b.GetVectorComponents()))
print("Vector c = " + str(self.vector_c.GetVectorComponents()))
print("")
self.ResetTempVolume()
def InitializeAngles(self):
self.alpha = FindAngleBetweenVectors(self.vector_a, self.vector_b, Radians = self.Radians)
self.beta = FindAngleBetweenVectors(self.vector_a, self.vector_c, Radians = self.Radians)
self.gamma = FindAngleBetweenVectors(self.vector_b, self.vector_c, Radians = self.Radians)
if (self.Debug == True):
print("alpha = %f" % self.alpha)
print("beta = %f" % self.beta)
print("gamma = %f" % self.gamma)
print("")
self.ResetTempAngles()
    #Would love to reduce the first if/elif/else conditional to a one-liner.
    #However, the different components have to be obtained somehow. Making
    #DetermineTempVectorbyConstant return a list might remedy this.
def ChangeTempVolume(self, constant, diff):
if (constant == 'a'):
self.temp_vector_a.ModifyComponent(0, diff)
elif (constant == 'b'):
self.temp_vector_b.ModifyComponent(1, diff)
elif (constant == 'c'):
self.temp_vector_c.ModifyComponent(2, diff)
else:
print("Was not given a proper constant")
return
#If the else statement was not called, this method is called to Recalculate
#the temp angles given the fact that the volume has changed.
self.RecalculateTempAngles()
#This function returns the correct vector object which is determined by the constant.
#Reduces a lot of boilerplate code
def DetermineTempVectorbyConstant(self, constant):
        if (constant == 'a'):
return self.temp_vector_a
elif (constant == 'b'):
return self.temp_vector_b
        elif (constant == 'c'):
return self.temp_vector_c
else:
return None
def RecalculateTempAngles(self):
self.temp_alpha = self.temp_beta = self.temp_gamma = None
self.temp_alpha = FindAngleBetweenVectors(self.temp_vector_a, self.temp_vector_b, Radians = self.Radians)
self.temp_beta = FindAngleBetweenVectors(self.temp_vector_a, self.temp_vector_c, Radians = self.Radians)
self.temp_gamma = FindAngleBetweenVectors(self.temp_vector_b, self.temp_vector_c, Radians = self.Radians)
if (self.Debug == True):
print("temp alpha = %f" % self.temp_alpha)
print("temp beta = %f" % self.temp_beta)
print("temp gamma = %f" % self.temp_gamma)
print("")
def ResetTempVolume(self):
self.temp_vector_a = deepcopy(self.vector_a)
self.temp_vector_b = deepcopy(self.vector_b)
self.temp_vector_c = deepcopy(self.vector_c)
if (self.Debug == True):
print("Temp vector a = " + str(self.temp_vector_a.GetVectorComponents()))
print("Temp vector b = " + str(self.temp_vector_b.GetVectorComponents()))
print("Temp vector c = " + str(self.temp_vector_c.GetVectorComponents()))
print("")
def ResetTempAngles(self):
self.temp_alpha = self.alpha
self.temp_beta = self.beta
self.temp_gamma = self.gamma
def WriteTempData(self):
fp = open(self.Filename, "a")
fp.write(str(self.temp_vector_a.GetVectorComponents()) + "\n")
fp.write(str(self.temp_vector_b.GetVectorComponents()) + "\n")
fp.write(str(self.temp_vector_c.GetVectorComponents()) + "\n")
fp.close()
def Sin_RadToDeg(self, theta):
return sin((180/pi) * theta)
def Sin_DegToRad(self, theta):
return sin((pi/180) * theta)
class Monoclinic(Structure):
#This function is for debug purposes. Call only if the sin of a number
#needs to be verified for consistency. Sometimes the angle provided
#may be different from an expected value, so this can help solve certain issues.
def GetAngles(self):
if (self.Debug == True):
if (self.Radians == True):
print("Radians")
print("sin(%f) = %f" % (self.beta, sin(self.beta)))
else:
print("Degrees")
print("sin(%f) = %f" % (self.beta, self.Sin_DegToRad(self.beta)))
print("")
def ChangeTempVolume(self, constant, diff):
        #Makes assumptions about where the perpendicular vectors are. I haven't
        #encountered a POSCAR file that differs, though I see no reason one
        #couldn't exist. In any event, this code will cause a cell to lose its
        #initial structure if the assumptions are incorrect.
if (constant == 'a'):
self.temp_vector_a.ModifyComponent(0, diff)
        elif (constant == 'b'):
            self.temp_vector_b.ModifyComponent(1, diff)
        elif (constant == 'c'):
            self.temp_vector_c.SpreadValueAcrossComponents([0, 2], diff)
else:
print("Invalid constant passed")
def GetVolume(self):
magnitude_product = self.vector_a.GetMagnitude() * self.vector_b.GetMagnitude() * self.vector_c.GetMagnitude()
if (self.Radians == True):
return magnitude_product * sin(self.beta)
else:
return magnitude_product * self.Sin_DegToRad(self.beta)
def GetTempVolume(self):
magnitude_product = self.temp_vector_a.GetMagnitude() * self.temp_vector_b.GetMagnitude() * self.temp_vector_c.GetMagnitude()
if (self.Radians == True):
return magnitude_product * sin(self.temp_beta)
else:
return magnitude_product * self.Sin_DegToRad(self.temp_beta)
def WriteTempVolume(self):
fp = open(self.Filename, "a")
fp.write("Temp Volume = %f\n\n" % (self.GetTempVolume()))
fp.close()
class Cubic(Structure):
def GetAngles(self):
print("Angles not necessary in cubic structure")
def GetVolume(self):
return self.vector_a.GetMagnitude() * self.vector_b.GetMagnitude() * self.vector_c.GetMagnitude()
def GetTempVolume(self):
return self.temp_vector_a.GetMagnitude() * self.temp_vector_b.GetMagnitude() * self.temp_vector_c.GetMagnitude()
def WriteTempVolume(self):
fp = open(self.Filename, "a")
fp.write("Temp Volume = %f\n\n" % (self.GetTempVolume()))
fp.close()
class Hexagonal(Structure):
def GetAngles(self):
print("Angles not necessary in Hexagonal structure")
def GetVolume(self):
        return self.vector_a.GetMagnitude() * self.vector_b.GetMagnitude() * self.vector_c.GetMagnitude() * sin(pi/3)
def GetTempVolume(self):
return self.temp_vector_a.GetMagnitude() * self.temp_vector_b.GetMagnitude() * self.temp_vector_c.GetMagnitude() * sin(pi/3)
def WriteTempVolume(self):
fp = open(self.Filename, "a")
fp.write("Temp Volume = %f\n\n" % (self.GetTempVolume()))
fp.close()
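def _monoclinic_volume_example():
    '''
    Illustrative check, not part of the original module: the monoclinic volume
    formula used above reduces to V = |a| * |b| * |c| * sin(beta), so a cell
    with |a| = 2, |b| = 3, |c| = 4 and beta = pi/2 has volume 24.
    '''
    a, b, c, beta = 2.0, 3.0, 4.0, pi / 2
    assert abs(a * b * c * sin(beta) - 24.0) < 1e-12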
| Davenport-Physics/CS-Volume | Src/Structures.py | Python | mit | 9,113 |
"""The tests for the Script component."""
# pylint: disable=protected-access
import asyncio
from contextlib import contextmanager
from datetime import timedelta
import logging
from unittest import mock
import pytest
import voluptuous as vol
# Otherwise can't test just this file (import order issue)
from homeassistant import exceptions
import homeassistant.components.scene as scene
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON
from homeassistant.core import Context, CoreState, callback
from homeassistant.helpers import config_validation as cv, script
from homeassistant.helpers.event import async_call_later
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import (
async_capture_events,
async_fire_time_changed,
async_mock_service,
)
ENTITY_ID = "script.test"
@pytest.fixture
def mock_timeout(hass, monkeypatch):
"""Mock async_timeout.timeout."""
class MockTimeout:
def __init__(self, timeout):
self._timeout = timeout
self._loop = asyncio.get_event_loop()
self._task = None
self._cancelled = False
self._unsub = None
async def __aenter__(self):
if self._timeout is None:
return self
self._task = asyncio.Task.current_task()
if self._timeout <= 0:
self._loop.call_soon(self._cancel_task)
return self
# Wait for a time_changed event instead of real time passing.
self._unsub = async_call_later(hass, self._timeout, self._cancel_task)
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type is asyncio.CancelledError and self._cancelled:
self._unsub = None
self._task = None
raise asyncio.TimeoutError
if self._timeout is not None and self._unsub:
self._unsub()
self._unsub = None
self._task = None
return None
@callback
def _cancel_task(self, now=None):
if self._task is not None:
self._task.cancel()
self._cancelled = True
monkeypatch.setattr(script, "timeout", MockTimeout)
def async_watch_for_action(script_obj, message):
"""Watch for message in last_action."""
flag = asyncio.Event()
@callback
def check_action():
if script_obj.last_action and message in script_obj.last_action:
flag.set()
script_obj.change_listener = check_action
return flag
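# Illustrative sketch (an assumption mirroring the tests below): the returned
# asyncio.Event lets a test synchronize on a script step without sleeping.
#
#     flag = async_watch_for_action(script_obj, "delay")
#     hass.async_create_task(script_obj.async_run())
#     await asyncio.wait_for(flag.wait(), 1)   # fires once the delay starts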
async def test_firing_event_basic(hass):
"""Test the firing of events."""
event = "test_event"
context = Context()
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA({"event": event, "event_data": {"hello": "world"}})
script_obj = script.Script(hass, sequence)
await script_obj.async_run(context=context)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].context is context
assert events[0].data.get("hello") == "world"
async def test_firing_event_template(hass):
"""Test the firing of events."""
event = "test_event"
context = Context()
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
{
"event": event,
"event_data_template": {
"dict": {
1: "{{ is_world }}",
2: "{{ is_world }}{{ is_world }}",
3: "{{ is_world }}{{ is_world }}{{ is_world }}",
},
"list": ["{{ is_world }}", "{{ is_world }}{{ is_world }}"],
},
}
)
script_obj = script.Script(hass, sequence)
await script_obj.async_run({"is_world": "yes"}, context=context)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].context is context
assert events[0].data == {
"dict": {1: "yes", 2: "yesyes", 3: "yesyesyes"},
"list": ["yes", "yesyes"],
}
async def test_calling_service_basic(hass):
"""Test the calling of a service."""
context = Context()
calls = async_mock_service(hass, "test", "script")
sequence = cv.SCRIPT_SCHEMA({"service": "test.script", "data": {"hello": "world"}})
script_obj = script.Script(hass, sequence)
await script_obj.async_run(context=context)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get("hello") == "world"
async def test_calling_service_template(hass):
"""Test the calling of a service."""
context = Context()
calls = async_mock_service(hass, "test", "script")
sequence = cv.SCRIPT_SCHEMA(
{
"service_template": """
{% if True %}
test.script
{% else %}
test.not_script
{% endif %}""",
"data_template": {
"hello": """
{% if is_world == 'yes' %}
world
{% else %}
not world
{% endif %}
"""
},
}
)
script_obj = script.Script(hass, sequence)
await script_obj.async_run({"is_world": "yes"}, context=context)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get("hello") == "world"
async def test_multiple_runs_no_wait(hass):
"""Test multiple runs with no wait in script."""
logger = logging.getLogger("TEST")
calls = []
heard_event = asyncio.Event()
async def async_simulate_long_service(service):
"""Simulate a service that takes a not insignificant time."""
fire = service.data.get("fire")
listen = service.data.get("listen")
service_done = asyncio.Event()
@callback
def service_done_cb(event):
logger.debug("simulated service (%s:%s) done", fire, listen)
service_done.set()
calls.append(service)
logger.debug("simulated service (%s:%s) started", fire, listen)
unsub = hass.bus.async_listen(listen, service_done_cb)
hass.bus.async_fire(fire)
await service_done.wait()
unsub()
hass.services.async_register("test", "script", async_simulate_long_service)
@callback
def heard_event_cb(event):
logger.debug("heard: %s", event)
heard_event.set()
sequence = cv.SCRIPT_SCHEMA(
[
{
"service": "test.script",
"data_template": {"fire": "{{ fire1 }}", "listen": "{{ listen1 }}"},
},
{
"service": "test.script",
"data_template": {"fire": "{{ fire2 }}", "listen": "{{ listen2 }}"},
},
]
)
script_obj = script.Script(hass, sequence, script_mode="parallel", max_runs=2)
# Start script twice in such a way that second run will be started while first run
# is in the middle of the first service call.
unsub = hass.bus.async_listen("1", heard_event_cb)
logger.debug("starting 1st script")
hass.async_create_task(
script_obj.async_run(
{"fire1": "1", "listen1": "2", "fire2": "3", "listen2": "4"}
)
)
await asyncio.wait_for(heard_event.wait(), 1)
unsub()
logger.debug("starting 2nd script")
await script_obj.async_run(
{"fire1": "2", "listen1": "3", "fire2": "4", "listen2": "4"}
)
await hass.async_block_till_done()
assert len(calls) == 4
async def test_activating_scene(hass):
"""Test the activation of a scene."""
context = Context()
calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON)
sequence = cv.SCRIPT_SCHEMA({"scene": "scene.hello"})
script_obj = script.Script(hass, sequence)
await script_obj.async_run(context=context)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get(ATTR_ENTITY_ID) == "scene.hello"
@pytest.mark.parametrize("count", [1, 3])
async def test_stop_no_wait(hass, count):
"""Test stopping script."""
service_started_sem = asyncio.Semaphore(0)
finish_service_event = asyncio.Event()
event = "test_event"
events = async_capture_events(hass, event)
async def async_simulate_long_service(service):
"""Simulate a service that takes a not insignificant time."""
service_started_sem.release()
await finish_service_event.wait()
hass.services.async_register("test", "script", async_simulate_long_service)
sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}])
script_obj = script.Script(hass, sequence, script_mode="parallel", max_runs=count)
# Get script started specified number of times and wait until the test.script
# service has started for each run.
tasks = []
for _ in range(count):
hass.async_create_task(script_obj.async_run())
tasks.append(hass.async_create_task(service_started_sem.acquire()))
await asyncio.wait_for(asyncio.gather(*tasks), 1)
# Can't assert just yet because we haven't verified stopping works yet.
# If assert fails we can hang test if async_stop doesn't work.
    script_was_running = script_obj.is_running
were_no_events = len(events) == 0
# Begin the process of stopping the script (which should stop all runs), and then
# let the service calls complete.
hass.async_create_task(script_obj.async_stop())
finish_service_event.set()
await hass.async_block_till_done()
    assert script_was_running
assert were_no_events
assert not script_obj.is_running
assert len(events) == 0
async def test_delay_basic(hass, mock_timeout):
"""Test the delay."""
delay_alias = "delay step"
sequence = cv.SCRIPT_SCHEMA({"delay": {"seconds": 5}, "alias": delay_alias})
script_obj = script.Script(hass, sequence)
delay_started_flag = async_watch_for_action(script_obj, delay_alias)
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
assert script_obj.last_action == delay_alias
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert not script_obj.is_running
assert script_obj.last_action is None
async def test_multiple_runs_delay(hass, mock_timeout):
"""Test multiple runs with delay in script."""
event = "test_event"
events = async_capture_events(hass, event)
delay = timedelta(seconds=5)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event, "event_data": {"value": 1}},
{"delay": delay},
{"event": event, "event_data": {"value": 2}},
]
)
script_obj = script.Script(hass, sequence, script_mode="parallel", max_runs=2)
delay_started_flag = async_watch_for_action(script_obj, "delay")
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 1
assert events[-1].data["value"] == 1
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
# Start second run of script while first run is in a delay.
script_obj.sequence[1]["alias"] = "delay run 2"
delay_started_flag = async_watch_for_action(script_obj, "delay run 2")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
async_fire_time_changed(hass, dt_util.utcnow() + delay)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 4
assert events[-3].data["value"] == 1
assert events[-2].data["value"] == 2
assert events[-1].data["value"] == 2
async def test_delay_template_ok(hass, mock_timeout):
"""Test the delay as a template."""
sequence = cv.SCRIPT_SCHEMA({"delay": "00:00:{{ 5 }}"})
script_obj = script.Script(hass, sequence)
delay_started_flag = async_watch_for_action(script_obj, "delay")
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert not script_obj.is_running
async def test_delay_template_invalid(hass, caplog):
"""Test the delay as a template that fails."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": "{{ invalid_delay }}"},
{"delay": {"seconds": 5}},
{"event": event},
]
)
script_obj = script.Script(hass, sequence)
start_idx = len(caplog.records)
await script_obj.async_run()
await hass.async_block_till_done()
assert any(
rec.levelname == "ERROR" and "Error rendering" in rec.message
for rec in caplog.records[start_idx:]
)
assert not script_obj.is_running
assert len(events) == 1
async def test_delay_template_complex_ok(hass, mock_timeout):
"""Test the delay with a working complex template."""
sequence = cv.SCRIPT_SCHEMA({"delay": {"seconds": "{{ 5 }}"}})
script_obj = script.Script(hass, sequence)
delay_started_flag = async_watch_for_action(script_obj, "delay")
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert not script_obj.is_running
async def test_delay_template_complex_invalid(hass, caplog):
"""Test the delay with a complex template that fails."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": {"seconds": "{{ invalid_delay }}"}},
{"delay": {"seconds": 5}},
{"event": event},
]
)
script_obj = script.Script(hass, sequence)
start_idx = len(caplog.records)
await script_obj.async_run()
await hass.async_block_till_done()
assert any(
rec.levelname == "ERROR" and "Error rendering" in rec.message
for rec in caplog.records[start_idx:]
)
assert not script_obj.is_running
assert len(events) == 1
async def test_cancel_delay(hass):
"""Test the cancelling while the delay is present."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA([{"delay": {"seconds": 5}}, {"event": event}])
script_obj = script.Script(hass, sequence)
delay_started_flag = async_watch_for_action(script_obj, "delay")
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 0
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
await script_obj.async_stop()
assert not script_obj.is_running
# Make sure the script is really stopped.
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 0
async def test_wait_template_basic(hass):
"""Test the wait template."""
wait_alias = "wait step"
sequence = cv.SCRIPT_SCHEMA(
{
"wait_template": "{{ states.switch.test.state == 'off' }}",
"alias": wait_alias,
}
)
script_obj = script.Script(hass, sequence)
wait_started_flag = async_watch_for_action(script_obj, wait_alias)
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert script_obj.last_action == wait_alias
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert script_obj.last_action is None
async def test_multiple_runs_wait_template(hass):
"""Test multiple runs with wait_template in script."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event, "event_data": {"value": 1}},
{"wait_template": "{{ states.switch.test.state == 'off' }}"},
{"event": event, "event_data": {"value": 2}},
]
)
script_obj = script.Script(hass, sequence, script_mode="parallel", max_runs=2)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 1
assert events[-1].data["value"] == 1
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
# Start second run of script while first run is in wait_template.
hass.async_create_task(script_obj.async_run())
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 4
assert events[-3].data["value"] == 1
assert events[-2].data["value"] == 2
assert events[-1].data["value"] == 2
async def test_cancel_wait_template(hass):
"""Test the cancelling while wait_template is present."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"wait_template": "{{ states.switch.test.state == 'off' }}"},
{"event": event},
]
)
script_obj = script.Script(hass, sequence)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 0
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
await script_obj.async_stop()
assert not script_obj.is_running
# Make sure the script is really stopped.
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 0
async def test_wait_template_not_schedule(hass):
"""Test the wait template with correct condition."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event},
{"wait_template": "{{ states.switch.test.state == 'on' }}"},
{"event": event},
]
)
script_obj = script.Script(hass, sequence)
hass.states.async_set("switch.test", "on")
await script_obj.async_run()
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
@pytest.mark.parametrize(
"continue_on_timeout,n_events", [(False, 0), (True, 1), (None, 1)]
)
async def test_wait_template_timeout(hass, mock_timeout, continue_on_timeout, n_events):
"""Test the wait template, halt on timeout."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = [
{"wait_template": "{{ states.switch.test.state == 'off' }}", "timeout": 5},
{"event": event},
]
if continue_on_timeout is not None:
sequence[0]["continue_on_timeout"] = continue_on_timeout
sequence = cv.SCRIPT_SCHEMA(sequence)
script_obj = script.Script(hass, sequence)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 0
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5))
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == n_events
async def test_wait_template_variables(hass):
"""Test the wait template with variables."""
sequence = cv.SCRIPT_SCHEMA({"wait_template": "{{ is_state(data, 'off') }}"})
script_obj = script.Script(hass, sequence)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run({"data": "switch.test"}))
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
async def test_condition_basic(hass):
"""Test if we can use conditions in a script."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"condition": "template",
"value_template": "{{ states.test.entity.state == 'hello' }}",
},
{"event": event},
]
)
script_obj = script.Script(hass, sequence)
hass.states.async_set("test.entity", "hello")
await script_obj.async_run()
await hass.async_block_till_done()
assert len(events) == 2
hass.states.async_set("test.entity", "goodbye")
await script_obj.async_run()
await hass.async_block_till_done()
assert len(events) == 3
@patch("homeassistant.helpers.script.condition.async_from_config")
async def test_condition_created_once(async_from_config, hass):
"""Test that the conditions do not get created multiple times."""
sequence = cv.SCRIPT_SCHEMA(
{
"condition": "template",
"value_template": '{{ states.test.entity.state == "hello" }}',
}
)
script_obj = script.Script(hass, sequence, script_mode="parallel", max_runs=2)
async_from_config.reset_mock()
hass.states.async_set("test.entity", "hello")
await script_obj.async_run()
await script_obj.async_run()
await hass.async_block_till_done()
async_from_config.assert_called_once()
assert len(script_obj._config_cache) == 1
async def test_condition_all_cached(hass):
"""Test that multiple conditions get cached."""
sequence = cv.SCRIPT_SCHEMA(
[
{
"condition": "template",
"value_template": '{{ states.test.entity.state == "hello" }}',
},
{
"condition": "template",
"value_template": '{{ states.test.entity.state != "hello" }}',
},
]
)
script_obj = script.Script(hass, sequence)
hass.states.async_set("test.entity", "hello")
await script_obj.async_run()
await hass.async_block_till_done()
assert len(script_obj._config_cache) == 2
async def test_repeat_count(hass):
"""Test repeat action w/ count option."""
event = "test_event"
events = async_capture_events(hass, event)
count = 3
sequence = cv.SCRIPT_SCHEMA(
{
"repeat": {
"count": count,
"sequence": {
"event": event,
"event_data_template": {
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
},
},
}
}
)
script_obj = script.Script(hass, sequence)
await script_obj.async_run()
await hass.async_block_till_done()
assert len(events) == count
for index, event in enumerate(events):
assert event.data.get("first") == str(index == 0)
assert event.data.get("index") == str(index + 1)
assert event.data.get("last") == str(index == count - 1)
@pytest.mark.parametrize("condition", ["while", "until"])
async def test_repeat_conditional(hass, condition):
"""Test repeat action w/ while option."""
event = "test_event"
events = async_capture_events(hass, event)
count = 3
sequence = {
"repeat": {
"sequence": [
{
"event": event,
"event_data_template": {
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
},
},
{"wait_template": "{{ is_state('sensor.test', 'next') }}"},
{"wait_template": "{{ not is_state('sensor.test', 'next') }}"},
],
}
}
if condition == "while":
sequence["repeat"]["while"] = {
"condition": "template",
"value_template": "{{ not is_state('sensor.test', 'done') }}",
}
else:
sequence["repeat"]["until"] = {
"condition": "template",
"value_template": "{{ is_state('sensor.test', 'done') }}",
}
script_obj = script.Script(hass, cv.SCRIPT_SCHEMA(sequence))
wait_started = async_watch_for_action(script_obj, "wait")
hass.states.async_set("sensor.test", "1")
hass.async_create_task(script_obj.async_run())
try:
for index in range(2, count + 1):
await asyncio.wait_for(wait_started.wait(), 1)
wait_started.clear()
hass.states.async_set("sensor.test", "next")
await asyncio.wait_for(wait_started.wait(), 1)
wait_started.clear()
hass.states.async_set("sensor.test", index)
await asyncio.wait_for(wait_started.wait(), 1)
hass.states.async_set("sensor.test", "next")
await asyncio.wait_for(wait_started.wait(), 1)
wait_started.clear()
hass.states.async_set("sensor.test", "done")
await asyncio.wait_for(hass.async_block_till_done(), 1)
except asyncio.TimeoutError:
await script_obj.async_stop()
raise
assert len(events) == count
for index, event in enumerate(events):
assert event.data.get("first") == str(index == 0)
assert event.data.get("index") == str(index + 1)
@pytest.mark.parametrize("condition", ["while", "until"])
async def test_repeat_var_in_condition(hass, condition):
"""Test repeat action w/ while option."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = {"repeat": {"sequence": {"event": event}}}
if condition == "while":
sequence["repeat"]["while"] = {
"condition": "template",
"value_template": "{{ repeat.index <= 2 }}",
}
else:
sequence["repeat"]["until"] = {
"condition": "template",
"value_template": "{{ repeat.index == 2 }}",
}
script_obj = script.Script(hass, cv.SCRIPT_SCHEMA(sequence))
with mock.patch(
"homeassistant.helpers.condition._LOGGER.error",
side_effect=AssertionError("Template Error"),
):
await script_obj.async_run()
assert len(events) == 2
async def test_repeat_nested(hass):
"""Test nested repeats."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{
"event": event,
"event_data_template": {
"repeat": "{{ None if repeat is not defined else repeat }}"
},
},
{
"repeat": {
"count": 2,
"sequence": [
{
"event": event,
"event_data_template": {
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
},
},
{
"repeat": {
"count": 2,
"sequence": {
"event": event,
"event_data_template": {
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
},
},
}
},
{
"event": event,
"event_data_template": {
"first": "{{ repeat.first }}",
"index": "{{ repeat.index }}",
"last": "{{ repeat.last }}",
},
},
],
}
},
{
"event": event,
"event_data_template": {
"repeat": "{{ None if repeat is not defined else repeat }}"
},
},
]
)
script_obj = script.Script(hass, sequence, "test script")
with mock.patch(
"homeassistant.helpers.condition._LOGGER.error",
side_effect=AssertionError("Template Error"),
):
await script_obj.async_run()
assert len(events) == 10
assert events[0].data == {"repeat": "None"}
assert events[-1].data == {"repeat": "None"}
for index, result in enumerate(
(
("True", "1", "False"),
("True", "1", "False"),
("False", "2", "True"),
("True", "1", "False"),
("False", "2", "True"),
("True", "1", "False"),
("False", "2", "True"),
("False", "2", "True"),
),
1,
):
assert events[index].data == {
"first": result[0],
"index": result[1],
"last": result[2],
}
@pytest.mark.parametrize("var,result", [(1, "first"), (2, "second"), (3, "default")])
async def test_choose(hass, var, result):
"""Test choose action."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
{
"choose": [
{
"conditions": {
"condition": "template",
"value_template": "{{ var == 1 }}",
},
"sequence": {"event": event, "event_data": {"choice": "first"}},
},
{
"conditions": {
"condition": "template",
"value_template": "{{ var == 2 }}",
},
"sequence": {"event": event, "event_data": {"choice": "second"}},
},
],
"default": {"event": event, "event_data": {"choice": "default"}},
}
)
script_obj = script.Script(hass, sequence)
await script_obj.async_run({"var": var})
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data["choice"] == result
@pytest.mark.parametrize(
"action",
[
{"repeat": {"count": 1, "sequence": {"event": "abc"}}},
{"choose": {"conditions": [], "sequence": {"event": "abc"}}},
{"choose": [], "default": {"event": "abc"}},
],
)
async def test_multiple_runs_repeat_choose(hass, caplog, action):
"""Test parallel runs with repeat & choose actions & max_runs > default."""
max_runs = script.DEFAULT_MAX + 1
script_obj = script.Script(
hass, cv.SCRIPT_SCHEMA(action), script_mode="parallel", max_runs=max_runs
)
events = async_capture_events(hass, "abc")
for _ in range(max_runs):
hass.async_create_task(script_obj.async_run())
await hass.async_block_till_done()
assert "WARNING" not in caplog.text
assert "ERROR" not in caplog.text
assert len(events) == max_runs
async def test_last_triggered(hass):
"""Test the last_triggered."""
event = "test_event"
sequence = cv.SCRIPT_SCHEMA({"event": event})
script_obj = script.Script(hass, sequence)
assert script_obj.last_triggered is None
time = dt_util.utcnow()
with mock.patch("homeassistant.helpers.script.utcnow", return_value=time):
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.last_triggered == time
async def test_propagate_error_service_not_found(hass):
"""Test that a script aborts when a service is not found."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}])
script_obj = script.Script(hass, sequence)
with pytest.raises(exceptions.ServiceNotFound):
await script_obj.async_run()
assert len(events) == 0
assert not script_obj.is_running
async def test_propagate_error_invalid_service_data(hass):
"""Test that a script aborts when we send invalid service data."""
event = "test_event"
events = async_capture_events(hass, event)
calls = async_mock_service(hass, "test", "script", vol.Schema({"text": str}))
sequence = cv.SCRIPT_SCHEMA(
[{"service": "test.script", "data": {"text": 1}}, {"event": event}]
)
script_obj = script.Script(hass, sequence)
with pytest.raises(vol.Invalid):
await script_obj.async_run()
assert len(events) == 0
assert len(calls) == 0
assert not script_obj.is_running
async def test_propagate_error_service_exception(hass):
"""Test that a script aborts when a service throws an exception."""
event = "test_event"
events = async_capture_events(hass, event)
@callback
def record_call(service):
"""Add recorded event to set."""
raise ValueError("BROKEN")
hass.services.async_register("test", "script", record_call)
sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}])
script_obj = script.Script(hass, sequence)
with pytest.raises(ValueError):
await script_obj.async_run()
assert len(events) == 0
assert not script_obj.is_running
async def test_referenced_entities(hass):
"""Test referenced entities."""
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{
"service": "test.script",
"data": {"entity_id": "light.service_not_list"},
},
{
"service": "test.script",
"data": {"entity_id": ["light.service_list"]},
},
{
"condition": "state",
"entity_id": "sensor.condition",
"state": "100",
},
{"service": "test.script", "data": {"without": "entity_id"}},
{"scene": "scene.hello"},
{"event": "test_event"},
{"delay": "{{ delay_period }}"},
]
),
)
assert script_obj.referenced_entities == {
"light.service_not_list",
"light.service_list",
"sensor.condition",
"scene.hello",
}
# Test we cache results.
assert script_obj.referenced_entities is script_obj.referenced_entities
async def test_referenced_devices(hass):
"""Test referenced entities."""
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"domain": "light", "device_id": "script-dev-id"},
{
"condition": "device",
"device_id": "condition-dev-id",
"domain": "switch",
},
]
),
)
assert script_obj.referenced_devices == {"script-dev-id", "condition-dev-id"}
# Test we cache results.
assert script_obj.referenced_devices is script_obj.referenced_devices
@contextmanager
def does_not_raise():
"""Indicate no exception is expected."""
yield
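# Typical (hypothetical) pairing of does_not_raise() with pytest.raises in a
# parametrized test, so passing and failing inputs can share one test body:
#
#     @pytest.mark.parametrize(
#         "value,expectation",
#         [(1, does_not_raise()), ("bad", pytest.raises(vol.Invalid))],
#     )
#     def test_validate(value, expectation):
#         with expectation:
#             vol.Schema(int)(value)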
async def test_script_mode_single(hass, caplog):
"""Test overlapping runs with max_runs = 1."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event, "event_data": {"value": 1}},
{"wait_template": "{{ states.switch.test.state == 'off' }}"},
{"event": event, "event_data": {"value": 2}},
]
)
script_obj = script.Script(hass, sequence)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 1
assert events[0].data["value"] == 1
# Start second run of script while first run is suspended in wait_template.
await script_obj.async_run()
assert "Already running" in caplog.text
assert script_obj.is_running
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
assert events[1].data["value"] == 2
@pytest.mark.parametrize(
"script_mode,messages,last_events",
[("restart", ["Restarting"], [2]), ("parallel", [], [2, 2])],
)
async def test_script_mode_2(hass, caplog, script_mode, messages, last_events):
"""Test overlapping runs with max_runs > 1."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event, "event_data": {"value": 1}},
{"wait_template": "{{ states.switch.test.state == 'off' }}"},
{"event": event, "event_data": {"value": 2}},
]
)
logger = logging.getLogger("TEST")
max_runs = 1 if script_mode == "restart" else 2
script_obj = script.Script(
hass, sequence, script_mode=script_mode, max_runs=max_runs, logger=logger
)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 1
assert events[0].data["value"] == 1
# Start second run of script while first run is suspended in wait_template.
wait_started_flag.clear()
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 2
assert events[1].data["value"] == 1
assert all(
any(
rec.levelname == "INFO"
and rec.name == "TEST"
and message in rec.message
for rec in caplog.records
)
for message in messages
)
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2 + len(last_events)
for idx, value in enumerate(last_events, start=2):
assert events[idx].data["value"] == value
async def test_script_mode_queued(hass):
"""Test overlapping runs with script_mode = 'queued' & max_runs > 1."""
event = "test_event"
events = async_capture_events(hass, event)
sequence = cv.SCRIPT_SCHEMA(
[
{"event": event, "event_data": {"value": 1}},
{"wait_template": "{{ states.switch.test.state == 'off' }}"},
{"event": event, "event_data": {"value": 2}},
{"wait_template": "{{ states.switch.test.state == 'on' }}"},
]
)
logger = logging.getLogger("TEST")
script_obj = script.Script(
hass, sequence, script_mode="queued", max_runs=2, logger=logger
)
wait_started_flag = async_watch_for_action(script_obj, "wait")
try:
hass.states.async_set("switch.test", "on")
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 1
assert events[0].data["value"] == 1
# Start second run of script while first run is suspended in wait_template.
# This second run should not start until the first run has finished.
hass.async_create_task(script_obj.async_run())
await asyncio.sleep(0)
assert script_obj.is_running
assert len(events) == 1
wait_started_flag.clear()
hass.states.async_set("switch.test", "off")
await asyncio.wait_for(wait_started_flag.wait(), 1)
assert script_obj.is_running
assert len(events) == 2
assert events[1].data["value"] == 2
wait_started_flag.clear()
hass.states.async_set("switch.test", "on")
await asyncio.wait_for(wait_started_flag.wait(), 1)
await asyncio.sleep(0)
assert script_obj.is_running
assert len(events) == 3
assert events[2].data["value"] == 1
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
hass.states.async_set("switch.test", "off")
await asyncio.sleep(0)
hass.states.async_set("switch.test", "on")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 4
assert events[3].data["value"] == 2
async def test_script_logging(hass, caplog):
"""Test script logging."""
script_obj = script.Script(hass, [], "Script with % Name")
script_obj._log("Test message with name %s", 1)
assert "Script with % Name: Test message with name 1" in caplog.text
script_obj = script.Script(hass, [])
script_obj._log("Test message without name %s", 2)
assert "Test message without name 2" in caplog.text
async def test_shutdown_at(hass, caplog):
"""Test stopping scripts at shutdown."""
delay_alias = "delay step"
sequence = cv.SCRIPT_SCHEMA({"delay": {"seconds": 120}, "alias": delay_alias})
script_obj = script.Script(hass, sequence, "test script")
delay_started_flag = async_watch_for_action(script_obj, delay_alias)
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
assert script_obj.last_action == delay_alias
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
hass.bus.async_fire("homeassistant_stop")
await hass.async_block_till_done()
assert not script_obj.is_running
assert "Stopping scripts running at shutdown: test script" in caplog.text
async def test_shutdown_after(hass, caplog):
"""Test stopping scripts at shutdown."""
delay_alias = "delay step"
sequence = cv.SCRIPT_SCHEMA({"delay": {"seconds": 120}, "alias": delay_alias})
script_obj = script.Script(hass, sequence, "test script")
delay_started_flag = async_watch_for_action(script_obj, delay_alias)
hass.state = CoreState.stopping
hass.bus.async_fire("homeassistant_stop")
await hass.async_block_till_done()
try:
hass.async_create_task(script_obj.async_run())
await asyncio.wait_for(delay_started_flag.wait(), 1)
assert script_obj.is_running
assert script_obj.last_action == delay_alias
except (AssertionError, asyncio.TimeoutError):
await script_obj.async_stop()
raise
else:
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=60))
await hass.async_block_till_done()
assert not script_obj.is_running
assert (
"Stopping scripts running too long after shutdown: test script"
in caplog.text
)
async def test_update_logger(hass, caplog):
"""Test updating logger."""
sequence = cv.SCRIPT_SCHEMA({"event": "test_event"})
script_obj = script.Script(hass, sequence)
await script_obj.async_run()
await hass.async_block_till_done()
assert script.__name__ in caplog.text
log_name = "testing.123"
script_obj.update_logger(logging.getLogger(log_name))
await script_obj.async_run()
await hass.async_block_till_done()
assert log_name in caplog.text
| pschmitt/home-assistant | tests/helpers/test_script.py | Python | apache-2.0 | 46,540 |
# -*- coding: utf-8 -*-
# Exploded Assembly Animation workbench for FreeCAD
# (c) 2016 Javier Martínez García
#***************************************************************************
#* (c) Javier Martínez García 2016 *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU General Public License (GPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************/
from __future__ import division
import os
import time
import FreeCAD
import Part
from pivy import coin
"""
class AnimationCamera:
def __init__(self, obj):
obj.addProperty('App::PropertyBool', 'Enable')
obj.addProperty('App::PropertyString', 'RunFrom', 'Interval')
obj.addProperty('App::PropertyString', 'RunTo', 'Interval')
# edge trajectory
obj.addProperty('App::PropertyBool', 'EdgeTrajectory', 'Follow Edge')
obj.addProperty('App::PropertyString','ShapeName', 'Follow Edge')
obj.addProperty('App::PropertyFloat', 'EdgeNumber', 'Follow Edge')
# Manual trajectory
obj.addProperty('App::PropertyBool', 'ManualTrajectory', 'Manual trajectory')
obj.addProperty('App::PropertyVector', 'InitialCameraBase', 'Manual trajectory')
obj.addProperty('App::PropertyVector', 'InitialCameraLookPoint', 'Manual trajectory')
obj.addProperty('App::PropertyVector', 'FinalCameraBase', 'Manual trajectory')
obj.addProperty('App::PropertyVector', 'FinalCameraLookPoint', 'Manual trajectory')
obj.addProperty('App::PropertyStringList', 'TransitionMode', 'Manual trajectory').TransitionMode = 'Frame', 'Smooth'
# Attached trajectory
obj.addPropertyd
"""
class ManualAnimationCamera:
def __init__(self, obj):
obj.addProperty('App::PropertyBool', 'Enable', 'Enable Camera')
obj.addProperty('App::PropertyString', 'RunFrom', 'Interval')
obj.addProperty('App::PropertyString', 'RunTo', 'Interval')
obj.addProperty('App::PropertyVector', 'InitialCameraBase', 'Camera Position')
obj.addProperty('App::PropertyVector', 'FinalCameraBase', 'Camera Position')
obj.addProperty('App::PropertyVector', 'InitialCameraLookPoint', 'Camera Position')
obj.addProperty('App::PropertyVector', 'FinalCameraLookPoint', 'Camera Position')
obj.addProperty('App::PropertyEnumeration', 'Transition', 'Camera Transition').Transition = ['Sudden', 'Linear']
class ManualAnimationCameraViewProvider:
def __init__(self, obj):
obj.Proxy = self
def getIcon(self):
__dir__ = os.path.dirname(__file__)
return __dir__ + '/icons/AnimationCameraManual.svg'
def createManualCamera():
# retrieve selection
initial_obj = FreeCAD.Gui.Selection.getSelectionEx()[0].Object.Name
final_obj = FreeCAD.Gui.Selection.getSelectionEx()[1].Object.Name
EAFolder = FreeCAD.ActiveDocument.ExplodedAssembly
MCObj = FreeCAD.ActiveDocument.addObject('App::FeaturePython', 'ManualCamera')
ManualAnimationCamera(MCObj)
ManualAnimationCameraViewProvider(MCObj.ViewObject)
EAFolder.addObject(MCObj)
# add selection to camera from-to
MCObj.RunFrom = initial_obj
MCObj.RunTo = final_obj
# organize inside folder
FreeCAD.Gui.Selection.clearSelection()
FreeCAD.Gui.Selection.addSelection(MCObj)
FreeCAD.Gui.Selection.addSelection(EAFolder.Group[0])
from ExplodedAssembly import placeBeforeSelectedTrajectory
placeBeforeSelectedTrajectory()
FreeCAD.Console.PrintMessage('\nManual camera created\n')
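# The quoted block below is an unused prototype that moves the active view
# camera along a selected edge; it is kept inside a string literal purely as
# a reference and never executes.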
"""from FreeCAD import Base
cam = FreeCADGui.ActiveDocument.ActiveView.getCameraNode()
trajectory = Gui.Selection.getSelectionEx()[0].Object.Shape.Edges
for edge in trajectory:
startPoint = edge.valueAt( 0.0 )
endPoint = edge.valueAt( edge.Length )
dirVector = ( endPoint - startPoint ).normalize()
currentPoint = startPoint
while (currentPoint - startPoint).Length < edge.Length:
currentPoint = currentPoint + dirVector
cam.position.setValue(currentPoint + Base.Vector( 0,0, 10) )
cam.pointAt( coin.SbVec3f( endPoint[0], endPoint[1], endPoint[2]+10) , coin.SbVec3f( 0, 0, 1 ) )
Gui.updateGui()
time.sleep(0.005)
"""
| JMG1/ExplodedAssembly | CameraAnimation.py | Python | gpl-2.0 | 5,428 |
from art_instructions.brain import BrainFSM, BrainInstruction
from transitions import State
import rospy
class GetReady(BrainInstruction):
pass
class GetReadyLearn(GetReady):
pass
class GetReadyRun(GetReady):
pass
class GetReadyFSM(BrainFSM):
states = [
State(name='get_ready', on_enter=[
'state_update_program_item', 'check_robot_in', 'state_get_ready'], on_exit=['check_robot_out']),
State(name='learning_get_ready_run', on_enter=[
'check_robot_in', 'learning_load_block_id', 'state_learning_get_ready_run'],
on_exit=['check_robot_out'])
]
transitions = [
('get_ready', 'program_run', 'get_ready'),
('done', 'get_ready', 'program_load_instruction'),
('error', 'get_ready', 'program_error'),
('get_ready_run', 'learning_run', 'learning_get_ready_run'),
('done', 'learning_get_ready_run', 'learning_run'),
('error', 'learning_get_ready_run', 'learning_step_error')
]
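    # Each tuple above follows the ``transitions`` library convention of
    # (trigger, source, destination): e.g. calling self.fsm.get_ready() while
    # the machine is in 'program_run' moves it into the 'get_ready' state.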
state_functions = [
'state_get_ready',
'state_learning_get_ready_run'
]
def run(self):
self.fsm.get_ready()
def learning_run(self):
self.fsm.get_ready_run()
def state_get_ready(self, event):
rospy.logdebug('Current state: state_get_ready')
if not self.brain.check_robot():
return
self.brain.state_manager.update_program_item(
self.brain.ph.get_program_id(), self.brain.block_id, self.brain.instruction)
# TODO: call some service to set PR2 to ready position
# TODO handle if it fails
severity, error, arm_id = self.brain.robot.arms_get_ready()
if error is not None:
rospy.logerr("Error while geting ready: " + arm_id)
self.fsm.error(severity=severity, error=error)
else:
self.fsm.done(success=True)
def state_learning_get_ready_run(self, event):
        rospy.logdebug('Current state: state_learning_get_ready_run')
if not self.brain.check_robot():
return
self.brain.state_manager.update_program_item(
self.brain.ph.get_program_id(), self.brain.block_id, self.brain.instruction)
# TODO: call some service to set PR2 to ready position
# TODO handle if it fails
severity, error, arm_id = self.brain.robot.arms_get_ready()
if error is not None:
rospy.logerr("Error while geting ready: " + arm_id)
self.fsm.error(severity=severity, error=error)
else:
self.fsm.done(success=True)
| robofit/ar-table-itable | art_instructions/src/art_instructions/brain/get_ready.py | Python | lgpl-2.1 | 2,567 |
#!/usr/bin/python
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: pn_stp
author: "Pluribus Networks (@rajaspachipulusu17)"
version_added: "2.8"
short_description: CLI command to modify stp
description:
- This module can be used to modify Spanning Tree Protocol parameters.
options:
pn_cliswitch:
description:
- Target switch to run the CLI on.
type: str
required: false
state:
description:
      - State the action to perform. Use C(update) to modify STP parameters.
type: str
required: true
choices: ['update']
pn_hello_time:
description:
- STP hello time between 1 and 10 secs.
type: str
default: '2'
pn_enable:
description:
- enable or disable STP
type: bool
pn_root_guard_wait_time:
description:
- root guard wait time between 0 and 300 secs. 0 to disable wait.
type: str
default: '20'
pn_bpdus_bridge_ports:
description:
      - Send BPDU packets to bridge specific ports only (otherwise to all ports).
type: bool
pn_mst_max_hops:
description:
- maximum hop count for mstp bpdu.
type: str
default: '20'
pn_bridge_id:
description:
- STP bridge id.
type: str
pn_max_age:
description:
- maximum age time between 6 and 40 secs.
type: str
default: '20'
pn_stp_mode:
description:
- STP mode.
type: str
choices: ['rstp', 'mstp']
pn_mst_config_name:
description:
- Name for MST Configuration Instance.
type: str
pn_forwarding_delay:
description:
- STP forwarding delay between 4 and 30 secs.
type: str
default: '15'
pn_bridge_priority:
description:
- STP bridge priority.
type: str
default: '32768'
"""
EXAMPLES = """
- name: Modify stp
pn_stp:
pn_cliswitch: "sw01"
state: "update"
pn_hello_time: "3"
pn_stp_mode: "rstp"
"""
RETURN = """
command:
description: the CLI command run on the target node.
returned: always
type: str
stdout:
description: set of responses from the stp command.
returned: always
type: list
stderr:
description: set of error responses from the stp command.
returned: on error
type: list
changed:
description: indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli, booleanArgs
def main():
""" This section is for arguments parsing """
state_map = dict(
update='stp-modify'
)
module = AnsibleModule(
argument_spec=dict(
pn_cliswitch=dict(required=False, type='str'),
state=dict(required=True, type='str',
choices=state_map.keys()),
pn_hello_time=dict(required=False, type='str', default='2'),
pn_enable=dict(required=False, type='bool'),
pn_root_guard_wait_time=dict(required=False, type='str', default='20'),
pn_bpdus_bridge_ports=dict(required=False, type='bool'),
pn_mst_max_hops=dict(required=False, type='str', default='20'),
pn_bridge_id=dict(required=False, type='str'),
pn_max_age=dict(required=False, type='str', default='20'),
pn_stp_mode=dict(required=False, type='str',
choices=['rstp', 'mstp']),
pn_mst_config_name=dict(required=False, type='str'),
pn_forwarding_delay=dict(required=False, type='str', default='15'),
pn_bridge_priority=dict(required=False, type='str', default='32768'),
),
required_one_of=[['pn_enable', 'pn_hello_time',
'pn_root_guard_wait_time',
'pn_bpdus_bridge_ports',
'pn_mst_max_hops',
'pn_bridge_id',
'pn_max_age',
'pn_stp_mode',
'pn_mst_config_name',
'pn_forwarding_delay',
'pn_bridge_priority']]
)
# Accessing the arguments
cliswitch = module.params['pn_cliswitch']
state = module.params['state']
hello_time = module.params['pn_hello_time']
enable = module.params['pn_enable']
root_guard_wait_time = module.params['pn_root_guard_wait_time']
bpdus_bridge_ports = module.params['pn_bpdus_bridge_ports']
mst_max_hops = module.params['pn_mst_max_hops']
bridge_id = module.params['pn_bridge_id']
max_age = module.params['pn_max_age']
stp_mode = module.params['pn_stp_mode']
mst_config_name = module.params['pn_mst_config_name']
forwarding_delay = module.params['pn_forwarding_delay']
bridge_priority = module.params['pn_bridge_priority']
command = state_map[state]
# Building the CLI command string
cli = pn_cli(module, cliswitch)
if command == 'stp-modify':
cli += ' %s ' % command
if hello_time:
cli += ' hello-time ' + hello_time
if root_guard_wait_time:
cli += ' root-guard-wait-time ' + root_guard_wait_time
if mst_max_hops:
cli += ' mst-max-hops ' + mst_max_hops
if bridge_id:
cli += ' bridge-id ' + bridge_id
if max_age:
cli += ' max-age ' + max_age
if stp_mode:
cli += ' stp-mode ' + stp_mode
if mst_config_name:
cli += ' mst-config-name ' + mst_config_name
if forwarding_delay:
cli += ' forwarding-delay ' + forwarding_delay
if bridge_priority:
cli += ' bridge-priority ' + bridge_priority
cli += booleanArgs(enable, 'enable', 'disable')
cli += booleanArgs(bpdus_bridge_ports, 'bpdus-bridge-ports', 'bpdus-all-ports')
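    # booleanArgs() (from pn_nvos) is expected to append the first keyword when
    # the flag is True, the second when it is False, and nothing when the
    # parameter was not supplied, so optional booleans only reach the CLI when
    # explicitly set.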
run_cli(module, cli, state_map)
if __name__ == '__main__':
main()
| alxgu/ansible | lib/ansible/modules/network/netvisor/pn_stp.py | Python | gpl-3.0 | 6,175 |
"""Parameter randomization
==========================
Provides the optional randomization for the parameters of a
:class:`~ceed.function.FuncBase`. Each parameter of the function may be
randomized according to :attr:`~ceed.function.FuncBase.noisy_parameters`, that
attaches a distribution to the parameter.
This module provides a :class:`ParameterNoiseFactory` used to register
random distribution classes and it provides some built in distributions.
Distributions can also be extended using the :mod:`~ceed.function.plugin`
interface.
"""
from typing import Dict, Type, TypeVar, List
from kivy.event import EventDispatcher
from kivy.properties import NumericProperty, BooleanProperty
__all__ = ('ParameterNoiseFactory', 'NoiseBase', 'NoiseType')
NoiseType = TypeVar('NoiseType', bound='NoiseBase')
"""The type-hint type for :class:`NoiseBase`.
"""
class ParameterNoiseFactory(EventDispatcher):
"""Factory where distributions are registered and accessed by name.
"""
noise_classes: Dict[str, Type[NoiseType]] = {}
"""Keeps all classes registered with :meth:`register_class`.
"""
def __init__(self, **kwargs):
super(ParameterNoiseFactory, self).__init__(**kwargs)
self.noise_classes = {}
def register_class(self, cls: Type[NoiseType]):
"""Registers a :class:`NoiseBase` subclass, with the name of the class
in :attr:`noise_classes`.
"""
self.noise_classes[cls.__name__] = cls
def get_cls(self, name: str) -> Type[NoiseType]:
"""Looks up a noise class by name and returns it.
"""
return self.noise_classes[name]
def make_instance(self, config: dict) -> 'NoiseBase':
"""Takes a noise distribution instance's config, as returned by
:meth:`NoiseBase.get_config`, and creates a noise distribution instance
of that class and config, and returns it.
"""
cls = self.get_cls(config['cls'])
instance = cls(**{k: v for k, v in config.items() if k != 'cls'})
return instance
class NoiseBase(EventDispatcher):
"""Base class that can be used to randomize a function parameter with
:attr:`~ceed.function.FuncBase.noisy_parameters`.
Instances have a :meth:`sample` method that returns a random value when
called. This is used to sample a new value for function parameters.
"""
lock_after_forked: bool = BooleanProperty(False)
"""Functions can reference other function. After the reference functions
are expanded and copied before running the stage as an experiment, all
randomized parameters whose :attr:`lock_after_forked` is False are
resampled.
This allows the parameters with :attr:`lock_after_forked` set to True to
share the same random value as the original referenced function's
randomized value.
See :meth:`ceed.stage.CeedStage.copy_and_resample` for details.
"""
sample_each_loop = BooleanProperty(False)
"""Whether the parameter should be resampled for each loop iteration
(True) or whether we sample once and use that sample for all loop
iterations.
The values are pre-sampled before the function is executed. If True, using
:meth:`sample_seq`, otherwise, it's sampled once with :meth:`sample`.
For example, for the following function structure contained in a Stage::
CeedStage:
name: 'stage'
loop: 2
GroupFunc:
name: 'root'
loop: 5
GroupFunc:
name: 'child_a'
loop: 2
ConstFunc:
name: 'child'
loop: 3
where the ``child`` function's ``a`` parameter is randomized and
the ``child`` function is looped ``2 * 5 * 2 * 3 = 60`` times total across
the whole experiment.
Then, if :attr:`sample_each_loop` is False, we :meth:`sample` the parameter
once and the same value is used for all 60 loop iterations. Otherwise, we
pre-compute 60 samples using :meth:`sample_seq` from
:meth:`~ceed.function.FuncBase.resample_parameters` and then update the
parameter with each corresponding sample when the function or loop
iteration is initialized (:meth:`~ceed.function.FuncBase.init_func` and
:meth:`~ceed.function.FuncBase.init_loop_iteration`).
"""
def sample(self) -> float:
"""Samples the distribution and returns a new value.
"""
raise NotImplementedError
def sample_seq(self, n) -> List[float]:
"""Samples the distribution ``n`` times and returns a list of values.
By default it just calls :meth:`sample` ``n`` times to get the samples.
"""
return [self.sample() for _ in range(n)]
@property
def name(self) -> str:
"""The name of the class.
This is the name used with :attr:`ParameterNoiseFactory.get_cls`.
"""
return self.__class__.__name__
def get_config(self) -> dict:
"""Returns a dict representation of the instance that can be then
be used to reconstruct it with
:meth:`ParameterNoiseFactory.make_instance`.
This is also used to display the instance parameters to the user.
We infer the type of each parameter from the property value.
"""
return {
'cls': self.name,
'lock_after_forked': self.lock_after_forked,
'sample_each_loop': self.sample_each_loop}
def get_prop_pretty_name(self) -> Dict[str, str]:
"""Returns a dict mapping names of the parameters used by the class
to a nicer representation shown to the user.
"""
return {
'lock_after_forked': 'Lock after fork',
'sample_each_loop': 'Resample each loop',
}
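# The sketch below is illustrative and not part of Ceed: it shows how a custom
# distribution could be registered with the factory and then restored from its
# config. ``UniformNoise`` and its ``min_val``/``max_val`` properties are
# hypothetical names invented for this example.
if __name__ == '__main__':
    import random

    class UniformNoise(NoiseBase):
        """Samples uniformly from the interval [min_val, max_val)."""

        min_val: float = NumericProperty(0.)

        max_val: float = NumericProperty(1.)

        def sample(self) -> float:
            return random.uniform(self.min_val, self.max_val)

        def get_config(self) -> dict:
            config = super(UniformNoise, self).get_config()
            config.update({'min_val': self.min_val, 'max_val': self.max_val})
            return config

    factory = ParameterNoiseFactory()
    factory.register_class(UniformNoise)
    # Round-trip through a config dict, as the factory would when restoring.
    noise = factory.make_instance(
        {'cls': 'UniformNoise', 'min_val': 0., 'max_val': 2.})
    print(noise.sample_seq(3))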
| matham/Ceed | ceed/function/param_noise.py | Python | mit | 5,803 |
from filer.models import *
from django.core.files import File as DjangoFile
from os.path import basename
from urlparse import urlsplit
import urllib2
import os
def url2name(url):
return basename(urlsplit(url)[2])
def download(url, dir):
local_name = url2name(url)
local_dir = dir
local_path = '%s/%s' % (local_dir, local_name)
req = urllib2.Request(url)
r = urllib2.urlopen(req)
if r.info().has_key('Content-Disposition'):
# If the response has Content-Disposition, we take file name from it
local_name = r.info()['Content-Disposition'].split('filename=')[1]
if local_name[0] == '"' or local_name[0] == "'":
local_name = local_name[1:-1]
elif r.url != url:
# if we were redirected, the real file name we take from the final URL
local_name = url2name(r.url)
f = open(local_path, 'wb')
f.write(r.read())
f.close()
return local_name, local_path
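# Example usage (hypothetical URL and target directory): download() writes
# the remote file into ``dir`` and returns its name and local path, e.g.
#   name, path = download('http://example.com/logo.png', 'tmp')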
def url_to_file(url, folder):
# url = 'http://www.astrosurf.com/astrospace/images/ss/Satellite%2008.jpg'
local_name, local_path = download(url, 'tmp')
dj_file = DjangoFile(open(local_path), name=local_name)
obj, created = Image.objects.get_or_create(
original_filename=local_name,
file=dj_file,
folder=folder,
is_public=True)
os.remove(local_path)
    return obj
| hzlf/openbroadcast | website/lib/util/filer_extra.py | Python | gpl-3.0 | 1,532 |
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
"""This file was generated with the customdashboard management command, it
contains the two classes for the main dashboard and app index dashboard.
You can customize these classes as you want.
To activate your index dashboard add the following to your settings.py::
ADMIN_TOOLS_INDEX_DASHBOARD = 'dashboard.CustomIndexDashboard'
And to activate the app index dashboard::
ADMIN_TOOLS_APP_INDEX_DASHBOARD = 'dashboard.CustomAppIndexDashboard'"""
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from admin_tools.dashboard import modules, Dashboard, AppIndexDashboard
from admin_tools_stats.modules import DashboardCharts, get_active_graph
#from admin_tools.utils import get_admin_site_name
from django.conf import settings
class HistoryDashboardModule(modules.LinkList):
title = 'History'
def init_with_context(self, context):
request = context['request']
# we use sessions to store the visited pages stack
history = request.session.get('history', [])
for item in history:
self.children.append(item)
# add the current page to the history
history.insert(0, {
'title': context['title'],
'url': request.META['PATH_INFO'],
})
if len(history) > 10:
history = history[:10]
request.session['history'] = history
class CustomIndexDashboard(Dashboard):
"""Custom index dashboard"""
def init_with_context(self, context):
request = context['request']
# we want a 3 columns layout
self.columns = 3
self.children.append(modules.Group(
title=_("general").capitalize(),
display="tabs",
children=[
modules.AppList(
title=_('user').capitalize(),
models=('django.contrib.*', 'user_profile.*', 'agent.*', ),
),
modules.AppList(
_('task manager').title(),
models=('djcelery.*', ),
),
modules.AppList(
_('dashboard stats').capitalize(),
models=('admin_tools_stats.*', ),
),
modules.RecentActions(_('recent actions').capitalize(), 5),
]
))
self.children.append(modules.AppList(
_('callcenter').title(),
models=('callcenter.*', ),
))
self.children.append(modules.AppList(
_('settings').capitalize(),
models=('dialer_settings.*', ),
))
# append an app list module for "Dialer"
self.children.append(modules.AppList(
_('VoIP dialer').title(),
models=('dialer_cdr.*', 'dialer_gateway.*',
'dialer_contact.*', 'dialer_campaign.*', ),
))
# append an app list module for "Dialer"
self.children.append(modules.AppList(
_('surveys').capitalize(),
models=('survey.*', ),
))
self.children.append(modules.AppList(
_('SMS Gateway'),
models=('sms.*', ),
))
# append an app list module for "SMS"
self.children.append(modules.AppList(
_('SMS module'),
models=('sms_module.*', ),
))
# append an app list module for "Dialer"
self.children.append(modules.AppList(
_('audio files').title(),
models=('audiofield.*', ),
))
self.children.append(modules.AppList(
_('do not call').title(),
models=('dnc.*', ),
))
self.children.append(modules.AppList(
_('appointment').title(),
models=('appointment.*', ),
))
self.children.append(modules.AppList(
_('mod_mailer').title(),
models=('mod_mailer.*', ),
))
self.children.append(modules.LinkList(
_('Reporting'),
draggable=True,
deletable=True,
collapsible=True,
children=[
[_('Call Daily Report'),
reverse('admin:dialer_cdr_voipcall_changelist') + 'voip_daily_report/'],
],
))
# append a link list module for "quick links"
#"""
# site_name = get_admin_site_name(context)
        # Quick links seem to break the admin design if there are too many elements
self.children.append(modules.LinkList(
_('Quick links'),
layout='inline',
draggable=True,
deletable=True,
collapsible=True,
children=[
[_('Newfies-Dialer Website'), 'http://www.newfies-dialer.org/'],
[_('Support'), 'http://www.newfies-dialer.org/about-us/contact/'],
[_('Add-ons'), 'http://www.newfies-dialer.org/add-ons/'],
# [_('Change password'), reverse('%s:password_change' % site_name)],
# [_('Log out'), reverse('%s:logout' % site_name)],
],
))
#"""
if not settings.DEBUG:
# append a feed module
self.children.append(modules.Feed(
_('Latest Newfies-Dialer News'),
feed_url='http://www.newfies-dialer.org/category/blog/feed/',
limit=5
))
# append an app list module for "Country_prefix"
self.children.append(modules.AppList(
_('dashboard stats settings').title(),
models=('admin_dashboard_stats.*', ),
))
# Copy following code into your custom dashboard
graph_list = get_active_graph()
for i in graph_list:
kwargs = {}
kwargs['require_chart_jscss'] = False
kwargs['graph_key'] = i.graph_key
if request.POST.get('select_box_' + i.graph_key):
kwargs['select_box_' + i.graph_key] = request.POST['select_box_' + i.graph_key]
self.children.append(DashboardCharts(**kwargs))
class CustomAppIndexDashboard(AppIndexDashboard):
"""Custom app index dashboard for admin."""
# we disable title because its redundant with the model list module
title = ''
def __init__(self, *args, **kwargs):
AppIndexDashboard.__init__(self, *args, **kwargs)
#TODO: Find out better way
if str(self.app_title) == 'Dialer_Settings':
app_title = _('dialer settings').title()
models = ['dialer_settings.*']
elif str(self.app_title) == 'Dialer_Campaign':
app_title = _('dialer campaign').title()
models = ['dialer_campaign.*']
elif str(self.app_title) == 'Dialer_Contact':
app_title = _('dialer contact').title()
models = ['dialer_contact.*']
elif str(self.app_title) == 'Dialer_Cdr':
app_title = _('Dialer CDR')
models = ['dialer_cdr.*']
elif str(self.app_title) == 'Dialer_Gateway':
app_title = _('dialer gateway').title()
models = ['dialer_gateway.*']
elif str(self.app_title) == 'Country_Dialcode':
app_title = _('country dialcode').title()
models = ['country_dialcode.*']
elif str(self.app_title) == 'Dnc':
app_title = _('do not call').title()
models = ['dnc.*']
else:
app_title = self.app_title
models = self.models
# append a model list module and a recent actions module
self.children += [
#modules.ModelList(self.app_title, self.models),
modules.ModelList(app_title, models),
modules.RecentActions(
_('recent actions').title(),
include_list=self.get_app_content_types(),
limit=5,
),
]
def init_with_context(self, context):
"""Use this method if you need to access the request context."""
return super(CustomAppIndexDashboard, self).init_with_context(context)
| gale320/newfies-dialer | newfies/custom_admin_tools/dashboard.py | Python | mpl-2.0 | 8,479 |
from django.conf import settings
import logging
from django.utils.encoding import smart_unicode
from django.core.urlresolvers import reverse, NoReverseMatch
from splango.models import Subject, Experiment, Enrollment, GoalRecord
SPLANGO_STATE = "SPLANGO_STATE"
SPLANGO_SUBJECT = "SPLANGO_SUBJECT"
SPLANGO_QUEUED_UPDATES = "SPLANGO_QUEUED_UPDATES"
S_UNKNOWN = "UNKNOWN"
S_HUMAN = "HUMAN"
# borrowed from debug_toolbar
_HTML_TYPES = ('text/html', 'application/xhtml+xml')
# borrowed from debug_toolbar
def replace_insensitive(string, target, replacement):
"""
Similar to string.replace() but is case insensitive
Code borrowed from: http://forums.devshed.com/python-programming-11/case-insensitive-string-replace-490921.html
"""
no_case = string.lower()
index = no_case.rfind(target.lower())
if index >= 0:
return string[:index] + replacement + string[index + len(target):]
else: # no results so return the original string
return string
class RequestExperimentManager:
def __init__(self, request):
#logging.debug("REM init")
self.request = request
self.user_at_init = request.user
self.queued_actions = []
if self.request.session.get(SPLANGO_STATE) is None:
self.request.session[SPLANGO_STATE] = S_UNKNOWN
if self.is_first_visit():
logging.info("SPLANGO! First visit!")
first_visit_goalname = getattr(settings,
"SPLANGO_FIRST_VISIT_GOAL",
None)
if first_visit_goalname:
self.log_goal(first_visit_goalname)
def enqueue(self, action, params):
self.queued_actions.append( (action, params) )
def process_from_queue(self, action, params):
logging.info("SPLANGO! dequeued: %s (%s)" % (str(action), repr(params)))
if action == "enroll":
exp = Experiment.objects.get(name=params["exp_name"])
exp.enroll_subject_as_variant(self.get_subject(),
params["variant"])
elif action == "log_goal":
g = GoalRecord.record(self.get_subject(),
params["goal_name"],
params["request_info"],
extra=params.get("extra"))
logging.info("SPLANGO! goal! %s" % str(g))
else:
raise RuntimeError("Unknown queue action '%s'." % action)
def is_first_visit(self):
r = self.request
if r.user.is_authenticated():
return False
ref = r.META.get("HTTP_REFERER", "").lower()
        if not ref:  # no referer means the URL was most likely typed in directly
return True
if ref.startswith("http://"):
ref = ref[7:]
elif ref.startswith("https://"):
ref = ref[8:]
return not(ref.startswith(r.get_host()))
def render_js(self):
logging.info("SPLANGO! render_js")
prejs = ""
postjs = ""
if settings.DEBUG:
prejs = "try { "
postjs = ' } catch(e) { alert("DEBUG notice: Splango encountered a javascript error when attempting to confirm this user as a human. Is jQuery loaded?\\n\\nYou may notice inconsistent experiment enrollments until this is fixed.\\n\\nDetails:\\n"+e.toString()); }'
try:
url = reverse("splango-confirm-human")
except NoReverseMatch:
url = "/splango/confirm_human/"
return """<script type='text/javascript'>%sjQuery.get("%s");%s</script>""" % (prejs, url, postjs)
def confirm_human(self, reqdata=None):
logging.info("SPLANGO! Human confirmed!")
self.request.session[SPLANGO_STATE] = S_HUMAN
for (action, params) in self.request.session.get(SPLANGO_QUEUED_UPDATES, []):
self.process_from_queue(action, params)
def finish(self, response):
curstate = self.request.session.get(SPLANGO_STATE, S_UNKNOWN)
#logging.info("SPLANGO! finished... state=%s" % curstate)
curuser = self.request.user
if self.user_at_init != curuser:
logging.info("SPLANGO! user status changed over request: %s --> %s" % (str(self.user_at_init), str(curuser)))
if not(curuser.is_authenticated()):
# User logged out. It's a new session, nothing special.
pass
else:
# User has just logged in (or registered).
# We'll merge the session's current Subject with
# an existing Subject for this user, if exists,
# or simply set the subject.registered_as field.
self.request.session[SPLANGO_STATE] = S_HUMAN
# logging in counts as being proved a human
old_subject = self.request.session.get(SPLANGO_SUBJECT)
try:
existing_subject = Subject.objects.get(registered_as=curuser)
# there is an existing registered subject!
if old_subject and old_subject.id != existing_subject.id:
# merge old subject's activity into new
old_subject.merge_into(existing_subject)
# whether we had an old_subject or not, we must
# set session to use our existing_subject
self.request.session[SPLANGO_SUBJECT] = existing_subject
except Subject.DoesNotExist:
# promote current subject to registered!
sub = self.get_subject()
sub.registered_as = curuser
sub.save()
if curstate == S_HUMAN:
# run anything in my queue
for (action, params) in self.queued_actions:
self.process_from_queue(action, params)
self.queued_actions = []
else:
# shove queue into session
self.request.session.setdefault(SPLANGO_QUEUED_UPDATES, []).extend(self.queued_actions)
self.queued_actions = []
# and include JS if suitable for this response.
if response['Content-Type'].split(';')[0] in _HTML_TYPES:
response.content = replace_insensitive(smart_unicode(response.content), u'</body>', smart_unicode(self.render_js() + u'</body>'))
return response
def get_subject(self):
assert self.request.session[SPLANGO_STATE] == S_HUMAN, "Hey, you can't call get_subject until you know the subject is a human!"
sub = self.request.session.get(SPLANGO_SUBJECT)
if not sub:
sub = self.request.session[SPLANGO_SUBJECT] = Subject()
sub.save()
logging.info("SPLANGO! created subject: %s" % str(sub))
return sub
def declare_and_enroll(self, exp_name, variants):
e = Experiment.declare(exp_name, variants)
if self.request.session[SPLANGO_STATE] != S_HUMAN:
logging.info("SPLANGO! choosing new random variant for non-human")
v = e.get_random_variant()
self.enqueue("enroll", { "exp_name": e.name, "variant": v })
else:
sub = self.get_subject()
sv = e.get_variant_for(sub)
v = sv.variant
logging.info("SPLANGO! got variant %s for subject %s" % (str(v),str(sub)))
return v
def log_goal(self, goal_name, extra=None):
request_info = GoalRecord.extract_request_info(self.request)
self.enqueue("log_goal", { "goal_name": goal_name,
"request_info": request_info,
"extra": extra })
| shimon/Splango | splango/__init__.py | Python | mit | 7,869 |
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Audio and video playback.
pyglet can play WAV files, and if FFmpeg is installed, many other audio and
video formats.
Playback is handled by the :class:`.Player` class, which reads raw data from
:class:`Source` objects and provides methods for pausing, seeking, adjusting
the volume, and so on. The :class:`.Player` class implements the best
available audio device. ::
player = Player()
A :class:`Source` is used to decode arbitrary audio and video files. It is
associated with a single player by "queueing" it::
source = load('background_music.mp3')
player.queue(source)
Use the :class:`.Player` to control playback.
If the source contains video, the :py:meth:`Source.video_format` attribute
will be non-None, and the :py:attr:`Player.texture` attribute will contain the
current video image synchronised to the audio.
Decoding sounds can be processor-intensive and may introduce latency,
particularly for short sounds that must be played quickly, such as bullets or
explosions. You can force such sounds to be decoded and retained in memory
rather than streamed from disk by wrapping the source in a
:class:`StaticSource`::
bullet_sound = StaticSource(load('bullet.wav'))
The other advantage of a :class:`StaticSource` is that it can be queued on
any number of players, and so played many times simultaneously.
Pyglet relies on Python's garbage collector to release resources when a player
has finished playing a source. In this way some operations that could affect
the application performance can be delayed.
The player provides a :py:meth:`Player.delete` method that can be used to
release resources immediately.
"""
from .drivers import get_audio_driver
from .exceptions import MediaDecodeException
from .player import Player, PlayerGroup
from .codecs import get_decoders, get_encoders, add_decoders, add_encoders
from .codecs import add_default_media_codecs, have_ffmpeg
from .codecs import Source, StaticSource, StreamingSource, SourceGroup
from . import synthesis
__all__ = (
'load',
'get_audio_driver',
'Player',
'PlayerGroup',
'SourceGroup',
'get_encoders',
'get_decoders',
'add_encoders',
'add_decoders',
)
def load(filename, file=None, streaming=True, decoder=None):
"""Load a Source from a file.
All decoders that are registered for the filename extension are tried.
If none succeed, the exception from the first decoder is raised.
You can also specifically pass a decoder to use.
:Parameters:
`filename` : str
Used to guess the media format, and to load the file if `file` is
unspecified.
`file` : file-like object or None
Source of media data in any supported format.
`streaming` : bool
If `False`, a :class:`StaticSource` will be returned; otherwise
(default) a :class:`~pyglet.media.StreamingSource` is created.
`decoder` : MediaDecoder or None
A specific decoder you wish to use, rather than relying on
automatic detection. If specified, no other decoders are tried.
:rtype: StreamingSource or Source
"""
if decoder:
return decoder.decode(file, filename, streaming)
else:
first_exception = None
for decoder in get_decoders(filename):
try:
loaded_source = decoder.decode(file, filename, streaming)
return loaded_source
except MediaDecodeException as e:
if not first_exception or first_exception.exception_priority < e.exception_priority:
first_exception = e
# TODO: Review this:
# The FFmpeg codec attempts to decode anything, so this codepath won't be reached.
if not first_exception:
raise MediaDecodeException('No decoders are available for this media format.')
raise first_exception
add_default_media_codecs()
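# A minimal usage sketch of the API above ('music.wav' is a placeholder file
# name, not a file shipped with pyglet):
#
#     import pyglet
#
#     player = pyglet.media.Player()
#     player.queue(pyglet.media.load('music.wav', streaming=False))
#     player.play()
#     pyglet.app.run()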
| calexil/FightstickDisplay | pyglet/media/__init__.py | Python | gpl-3.0 | 5,683 |
import smtplib
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
Group as DjangoGroup,
GroupManager as _GroupManager,
Permission,
PermissionsMixin,
)
from django.core import mail
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
from django.db.models import Prefetch
from django.utils import timezone
from jsonfield import JSONField
from openslides.utils.manager import BaseManager
from ..core.config import config
from ..utils.auth import GROUP_ADMIN_PK
from ..utils.autoupdate import inform_changed_data
from ..utils.models import (
CASCADE_AND_AUTOUPDATE,
SET_NULL_AND_AUTOUPDATE,
RESTModelMixin,
)
from .access_permissions import (
GroupAccessPermissions,
PersonalNoteAccessPermissions,
UserAccessPermissions,
)
class UserManager(BaseUserManager):
"""
Customized manager that creates new users only with a password and a
username. It also supports our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, ids=None):
"""
Returns the normal queryset with all users. In the background all
groups are prefetched from the database together with all permissions
and content types.
"""
queryset = self.get_queryset()
if ids:
queryset = queryset.filter(pk__in=ids)
return queryset.prefetch_related(
Prefetch(
"groups",
queryset=Group.objects.select_related("group_ptr").prefetch_related(
Prefetch(
"permissions",
queryset=Permission.objects.select_related("content_type"),
)
),
),
"vote_delegated_from_users",
)
def create_user(self, username, password, skip_autoupdate=False, **kwargs):
"""
Creates a new user only with a password and a username.
"""
user = self.model(username=username, **kwargs)
user.set_password(password)
user.save(skip_autoupdate=skip_autoupdate, using=self._db)
return user
def create_or_reset_admin_user(self, skip_autoupdate=False):
"""
Creates an user with the username 'admin'. If such a user already
exists, resets it. The password is (re)set to 'admin'. The user
becomes member of the group 'Admin'.
"""
created = False
try:
admin = self.get(username="admin")
except ObjectDoesNotExist:
admin = self.model(username="admin", last_name="Administrator")
created = True
admin.default_password = "admin"
admin.password = make_password(admin.default_password)
admin.save(skip_autoupdate=skip_autoupdate)
admin.groups.add(GROUP_ADMIN_PK)
if not skip_autoupdate:
inform_changed_data(admin)
return created
def generate_username(self, first_name, last_name):
"""
Generates a username from first name and last name.
"""
first_name = first_name.strip()
last_name = last_name.strip()
if first_name and last_name:
base_name = " ".join((first_name, last_name))
else:
base_name = first_name or last_name
if not base_name:
raise ValueError(
"Either 'first_name' or 'last_name' must not be empty."
)
if not self.filter(username=base_name).exists():
generated_username = base_name
else:
counter = 0
while True:
counter += 1
test_name = f"{base_name} {counter}"
if not self.filter(username=test_name).exists():
generated_username = test_name
break
return generated_username
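# Example of the numbering scheme implemented above (names illustrative):
# if "Max Mustermann" is already taken, generate_username("Max", "Mustermann")
# returns "Max Mustermann 1"; if that is taken too, "Max Mustermann 2", etc.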
class User(RESTModelMixin, PermissionsMixin, AbstractBaseUser):
"""
    Model for users in OpenSlides. A client can log in as a user with
    credentials. A user can also just be used as representation for a person
in other OpenSlides apps like motion submitter or (assignment) election
candidates.
"""
access_permissions = UserAccessPermissions()
USERNAME_FIELD = "username"
username = models.CharField(max_length=255, unique=True, blank=True)
auth_type = models.CharField(max_length=64, default="default")
first_name = models.CharField(max_length=255, blank=True)
last_name = models.CharField(max_length=255, blank=True)
gender = models.CharField(max_length=255, blank=True)
email = models.EmailField(blank=True)
last_email_send = models.DateTimeField(blank=True, null=True)
# TODO: Try to remove the default argument in the following fields.
structure_level = models.CharField(max_length=255, blank=True, default="")
title = models.CharField(max_length=50, blank=True, default="")
number = models.CharField(max_length=50, blank=True, default="")
about_me = models.TextField(blank=True, default="")
comment = models.TextField(blank=True, default="")
default_password = models.CharField(max_length=100, blank=True, default="")
is_active = models.BooleanField(default=True)
is_present = models.BooleanField(default=False)
is_committee = models.BooleanField(default=False)
vote_weight = models.DecimalField(
default=Decimal("1"), max_digits=15, decimal_places=6, null=False, blank=True
)
vote_delegated_to = models.ForeignKey(
"self",
on_delete=SET_NULL_AND_AUTOUPDATE,
null=True,
blank=True,
related_name="vote_delegated_from_users",
)
objects = UserManager()
class Meta:
default_permissions = ()
permissions = (
("can_see_name", "Can see names of users"),
(
"can_see_extra_data",
"Can see extra data of users (e.g. email and comment)",
),
("can_change_password", "Can change its own password"),
("can_manage", "Can manage users"),
)
ordering = ("last_name", "first_name", "username")
def __str__(self):
# Strip white spaces from the name parts
first_name = self.first_name.strip()
last_name = self.last_name.strip()
# The user has a last_name and a first_name
if first_name and last_name:
name = " ".join((self.first_name, self.last_name))
# The user has only a first_name or a last_name or no name
else:
name = first_name or last_name or self.username
# Return result
return name
def save(self, *args, **kwargs):
"""
        Overridden method to skip autoupdate if only the last_login field was
        updated, as is done during login.
"""
if kwargs.get("update_fields") == ["last_login"]:
kwargs["skip_autoupdate"] = True
return super().save(*args, **kwargs)
def has_perm(self, perm):
"""
        This method is disabled. Do not use it; use openslides.utils.auth.has_perm instead.
"""
raise RuntimeError(
"Do not use user.has_perm() but use openslides.utils.auth.has_perm"
)
def send_invitation_email(
self, connection, subject, message, skip_autoupdate=False
):
"""
        Sends an invitation email to the user. Returns True on success, False on failure.
        May raise a ValidationError if something went wrong.
"""
if not self.email:
return False
        # Custom dict class for format strings: entries like {not_existent}
        # do not raise an error and are replaced with ''.
class format_dict(dict):
def __missing__(self, key):
return ""
message_format = format_dict(
{
"name": str(self),
"event_name": config["general_event_name"],
"url": config["users_pdf_url"],
"username": self.username,
"password": self.default_password,
}
)
try:
message = message.format(**message_format)
except KeyError as err:
raise ValidationError({"detail": "Invalid property {0}", "args": [err]})
subject_format = format_dict(
{"event_name": config["general_event_name"], "username": self.username}
)
try:
subject = subject.format(**subject_format)
except KeyError as err:
raise ValidationError({"detail": "Invalid property {0}", "args": [err]})
# Create an email and send it.
email = mail.EmailMessage(
subject,
message,
config["users_email_sender"] + " <" + settings.DEFAULT_FROM_EMAIL + ">",
[self.email],
reply_to=[config["users_email_replyto"]],
)
try:
count = connection.send_messages([email])
except smtplib.SMTPDataError as e:
error = e.smtp_code
helptext = ""
            if error == 554:
                # The server does not accept our connection; the code means
                # something like "transaction failed" or "No SMTP service here".
                helptext = " Is the email sender correct?"
connection.close()
raise ValidationError(
{
"detail": "Error {0}. Cannot send email.{1}",
"args": [error, helptext],
}
)
except smtplib.SMTPRecipientsRefused:
pass # Run into returning false later
except smtplib.SMTPAuthenticationError as e:
# Nice error message on auth failure
raise ValidationError(
{
"detail": "Error {0}: Authentication failure. Please contact your local administrator.",
"args": [e.smtp_code],
}
)
else:
if count == 1:
self.email_send = True
self.last_email_send = timezone.now()
self.save(skip_autoupdate=skip_autoupdate)
return True
return False
@property
def session_auth_hash(self):
"""
Returns the session auth hash of a user as attribute.
Needed for the django rest framework.
"""
return self.get_session_auth_hash()
class GroupManager(_GroupManager):
"""
Customized manager that supports our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, ids=None):
"""
Returns the normal queryset with all groups. In the background all
permissions with the content types are prefetched from the database.
"""
queryset = self.get_queryset()
if ids:
queryset = queryset.filter(pk__in=ids)
return queryset.select_related("group_ptr").prefetch_related(
Prefetch(
"permissions",
queryset=Permission.objects.select_related("content_type"),
)
)
class Group(RESTModelMixin, DjangoGroup):
"""
Extend the django group with support of our REST and caching system.
"""
access_permissions = GroupAccessPermissions()
objects = GroupManager()
class Meta:
default_permissions = ()
class PersonalNoteManager(BaseManager):
"""
Customized model manager to support our get_prefetched_queryset method.
"""
def get_prefetched_queryset(self, *args, **kwargs):
"""
Returns the normal queryset with all personal notes. In the background all
users are prefetched from the database.
"""
return super().get_prefetched_queryset(*args, **kwargs).select_related("user")
class PersonalNote(RESTModelMixin, models.Model):
"""
    Model for personal notes (e.g. likes/stars) of a user concerning different
    OpenSlides objects like motions.
"""
access_permissions = PersonalNoteAccessPermissions()
personalized_model = True
"""
Each model belongs to one user. This relation is set during creation and
will not be changed.
"""
objects = PersonalNoteManager()
user = models.OneToOneField(User, on_delete=CASCADE_AND_AUTOUPDATE)
notes = JSONField()
class Meta:
default_permissions = ()
| jwinzer/OpenSlides | server/openslides/users/models.py | Python | mit | 12,597 |
# Copyright (C) 2016 - Yevgen Muntyan
# Copyright (C) 2016 - Ignacio Casal Quinteiro
# Copyright (C) 2016 - Arnavion
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os
from gvsbuild.utils.base_builders import CmakeProject
from gvsbuild.utils.base_expanders import Tarball
from gvsbuild.utils.base_project import Project, project_add
from gvsbuild.utils.utils import file_replace
@project_add
class Libarchive(Tarball, CmakeProject):
def __init__(self):
Project.__init__(
self,
"libarchive",
archive_url="https://libarchive.org/downloads/libarchive-3.6.0.tar.xz",
hash="df283917799cb88659a5b33c0a598f04352d61936abcd8a48fe7b64e74950de7",
dependencies=[
"cmake",
"ninja",
"win-iconv",
"zlib",
"lz4",
"openssl",
"libxml2",
],
)
def build(self):
CmakeProject.build(self, cmake_params="-DENABLE_WERROR=OFF", use_ninja=True)
        # Fix the pkg-config .pc file, correcting the library names
file_replace(
os.path.join(self.pkg_dir, "lib", "pkgconfig", "libarchive.pc"),
[
(" -llz4", " -lliblz4"),
(" -leay32", " -llibeay32"),
(" -lxml2", " -llibxml2"),
],
)
self.install(r".\COPYING share\doc\libarchive")
| wingtk/gvsbuild | gvsbuild/projects/libarchive.py | Python | gpl-2.0 | 2,039 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2009 - 2014 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
__save__ = __name__
__name__ = 'omero'
try:
api = __import__('omero.api')
model = __import__('omero.model')
util = __import__('omero.util')
sys = __import__('omero.sys')
import omero.all
finally:
__name__ = __save__
del __save__
sys = __import__("sys")
import threading
import logging
import IceImport
import Ice
import uuid
IceImport.load("Glacier2_Router_ice")
import Glacier2
class BaseClient(object):
"""
    Central client-side blitz entry point; it should be kept in sync with
    OmeroJava's omero.client and OmeroCpp's omero::client.
Typical usage includes::
# Uses --Ice.Config argument or ICE_CONFIG variable
client = omero.client()
# Defines "omero.host"
client = omero.client(host = host)
# Defines "omero.host" and "omero.port"
client = omero.client(host = host, port = port)
"""
def __init__(self, args=None, id=None, host=None, port=None, pmap=None):
"""
Constructor which takes one sys.argv-style list, one initialization
data, one host string, one port integer, and one properties map, in
that order. *However*, to simplify use, we reassign values based on
their type with a warning printed. A cleaner approach is to use named
parameters.
::
c1 = omero.client(None, None, "host", myPort) # Correct
c2 = omero.client(host = "host", port = myPort) # Correct
# Works with warning
c3 = omero.client("host", myPort)
Both "Ice" and "omero" prefixed properties will be parsed.
Defines the state variables::
        __previous : InitializationData from any previous communicator, if
                     any. Used to re-initialize the client after
                     closeSession()
__ic : communicator. Nullness => init() needed on
createSession()
__sf : current session. Nullness => createSession() needed.
        __resources: if non-null, has access to this client instance and
will periodically call sf.keepAlive(None) in order to
keep any session alive. This can be enabled either
via the omero.keep_alive configuration property, or
by calling the enableKeepAlive() method.
Once enabled, the period cannot be adjusted during a
single session.
Modifying these variables outside of the accessors can lead to
undefined behavior.
Equivalent to all OmeroJava and OmeroCpp constructors
"""
# Setting all protected values to prevent AttributeError
self.__agent = "OMERO.py" #: See setAgent
self.__ip = None #: See setIP
self.__insecure = False
self.__previous = None
self.__ic = None
self.__oa = None
self.__cb = None
self.__sf = None
self.__uuid = None
self.__resources = None
self.__lock = threading.RLock()
# Logging
self.__logger = logging.getLogger("omero.client")
logging.basicConfig() # Does nothing if already configured
# Reassigning based on argument type
args, id, host, port, pmap = self._repair(args, id, host, port, pmap)
# Copying args since we don't really want them edited
if not args:
args = []
else:
# See ticket:5516 To prevent issues on systems where the base
# class of path.path is unicode, we will encode all unicode
# strings here.
for idx, arg in enumerate(args):
if isinstance(arg, unicode):
arg = arg.encode("utf-8")
args[idx] = arg
# Equiv to multiple constructors. #######################
if id is None:
id = Ice.InitializationData()
if id.properties is None:
id.properties = Ice.createProperties(args)
id.properties.parseCommandLineOptions("omero", args)
if host:
id.properties.setProperty("omero.host", str(host))
if not port:
port = id.properties.getPropertyWithDefault(
"omero.port", str(omero.constants.GLACIER2PORT))
id.properties.setProperty("omero.port", str(port))
if pmap:
for k, v in pmap.items():
id.properties.setProperty(str(k), str(v))
self._initData(id)
def _repair(self, args, id, host, port, pmap):
"""
Takes the 5 arguments passed to the __init__ method
and attempts to re-order them based on their types.
This allows for simplified usage without parameter
names.
"""
types = [list, Ice.InitializationData, str, int, dict]
original = [args, id, host, port, pmap]
repaired = [None, None, None, None, None]
# Check all to see if valid
valid = True
for i in range(0, len(types)):
if None != original[i] and not isinstance(original[i], types[i]):
valid = False
break
if valid:
return original
# Now try to find corrections.
for i in range(0, len(types)):
found = None
for j in range(0, len(types)):
if isinstance(original[j], types[i]):
if not found:
found = original[j]
else:
raise omero.ClientError(
"Found two arguments of same type: " +
str(types[i]))
if found:
repaired[i] = found
return repaired
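    # Illustration (hypothetical values): _repair("localhost", 4064, None,
    # None, None) yields [None, None, "localhost", 4064, None], i.e. host
    # and port are slotted into their positions based on type alone.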
def _initData(self, id):
"""
Initializes the current client via an Ice.InitializationData
instance. This is called by all of the constructors, but may
also be called on createSession(name, pass) if a previous
call to closeSession() has nulled the Ice.Communicator.
"""
if not id:
raise omero.ClientError("No initialization data provided.")
# Strictly necessary for this class to work
self._optSetProp(id, "Ice.ImplicitContext", "Shared")
if Ice.intVersion() >= 30600:
self._optSetProp(id, "Ice.ACM.Client.Timeout",
str(omero.constants.ACMCLIENTTIMEOUT))
self._optSetProp(id, "Ice.ACM.Client.Heartbeat",
str(omero.constants.ACMCLIENTHEARTBEAT))
else:
self._optSetProp(id, "Ice.ACM.Client", "0")
self._optSetProp(id, "Ice.CacheMessageBuffers", "0")
self._optSetProp(id, "Ice.RetryIntervals", "-1")
self._optSetProp(id, "Ice.Default.EndpointSelection", "Ordered")
self._optSetProp(id, "Ice.Default.PreferSecure", "1")
self._optSetProp(id, "Ice.Plugin.IceSSL", "IceSSL:createIceSSL")
if Ice.intVersion() >= 30600:
if sys.platform == "darwin":
self._optSetProp(id, "IceSSL.Ciphers", "NONE (DH_anon.*AES)")
else:
self._optSetProp(id, "IceSSL.Ciphers", "ADH")
else:
self._optSetProp(id, "IceSSL.Ciphers", "ADH")
self._optSetProp(id, "IceSSL.VerifyPeer", "0")
self._optSetProp(id, "IceSSL.Protocols", "tls1")
# Setting block size
self._optSetProp(
id, "omero.block_size", str(omero.constants.DEFAULTBLOCKSIZE))
# Set the default encoding if this is Ice 3.5 or later
# and none is set.
if Ice.intVersion() >= 30500:
self._optSetProp(
id, "Ice.Default.EncodingVersion", "1.0")
# Setting MessageSizeMax
self._optSetProp(
id, "Ice.MessageSizeMax", str(omero.constants.MESSAGESIZEMAX))
# Setting ConnectTimeout
self.parseAndSetInt(id, "Ice.Override.ConnectTimeout",
omero.constants.CONNECTTIMEOUT)
# Set large thread pool max values for all communicators
for x in ("Client", "Server"):
sizemax = id.properties.getProperty(
"Ice.ThreadPool.%s.SizeMax" % x)
if not sizemax or len(sizemax) == 0:
id.properties.setProperty(
"Ice.ThreadPool.%s.SizeMax" % x, "50")
# Port, setting to default if not present
port = self.parseAndSetInt(id, "omero.port",
omero.constants.GLACIER2PORT)
# Default Router, set a default and then replace
router = id.properties.getProperty("Ice.Default.Router")
if not router or len(router) == 0:
router = str(omero.constants.DEFAULTROUTER)
host = id.properties.getPropertyWithDefault(
"omero.host", """<"omero.host" not set>""")
router = router.replace("@omero.port@", str(port))
router = router.replace("@omero.host@", str(host))
id.properties.setProperty("Ice.Default.Router", router)
# Dump properties
dump = id.properties.getProperty("omero.dump")
if len(dump) > 0:
m = self.getPropertyMap(id.properties)
keys = list(m.keys())
keys.sort()
for key in keys:
print "%s=%s" % (key, m[key])
self.__lock.acquire()
try:
if self.__ic:
raise omero.ClientError("Client already initialized")
try:
self.__ic = Ice.initialize(id)
except Ice.EndpointParseException:
msg = "No host specified. "
msg += "Use omero.client(HOSTNAME), ICE_CONFIG, or similar."
raise omero.ClientError(msg)
if not self.__ic:
raise omero.ClientError("Improper initialization")
# Register Object Factory
import ObjectFactoryRegistrar as ofr
ofr.registerObjectFactory(self.__ic, self)
for of in omero.rtypes.ObjectFactories.values():
of.register(self.__ic)
# Define our unique identifier (used during close/detach)
self.__uuid = str(uuid.uuid4())
ctx = self.__ic.getImplicitContext()
if not ctx:
raise omero.ClientError(
"Ice.ImplicitContext not set to Shared")
ctx.put(omero.constants.CLIENTUUID, self.__uuid)
# ticket:2951 - sending user group
group = id.properties.getPropertyWithDefault("omero.group", "")
if group:
ctx.put("omero.group", group)
finally:
self.__lock.release()
def setAgent(self, agent):
"""
Sets the omero.model.Session#getUserAgent() string for
this client. Every session creation will be passed this argument.
Finding open sessions with the same agent can be done via
omero.api.ISessionPrx#getMyOpenAgentSessions(String).
"""
self.__agent = agent
def setIP(self, ip):
"""
Sets the omero.model.Session#getUserIP() string for
this client. Every session creation will be passed this argument.
Finding open sessions with the same IP can be done via
omero.api.ISessionPrx#getMyOpenIPSessions(ip).
"""
self.__ip = ip
def isSecure(self):
"""
Specifies whether or not this client was created via a call to
createClient with a boolean of False. If insecure, then all
remote calls will use the insecure connection defined by the server.
"""
return not self.__insecure
def createClient(self, secure):
"""
Creates a possibly insecure omero.client instance and calls
joinSession using the current getSessionId value. If secure is False,
then first the "omero.router.insecure" configuration property is
retrieved from the server and used as the value of
"Ice.Default.Router" for the new client. Any exception thrown during
creation is passed on to the caller.
Note: detachOnDestroy has NOT been called on the session in the
returned client.
        Clients are responsible for doing this immediately if so desired.
"""
props = self.getPropertyMap()
if not secure:
insecure = self.getSession().getConfigService().getConfigValue(
"omero.router.insecure")
if insecure is not None and insecure != "":
# insecure still has @omero.host@, so we need to substitute it
router = self.getRouter(self.getCommunicator())
if router is not None:
for endpoint in router.ice_getEndpoints():
host = endpoint.getInfo().host
if host != "":
insecure = insecure.replace("@omero.host@", str(host))
props["Ice.Default.Router"] = insecure
else:
self.__logger.warn(
"Could not retrieve \"omero.router.insecure\"")
nClient = omero.client(props)
nClient.__insecure = not secure
nClient.setAgent("%s;secure=%s" % (self.__agent, secure))
nClient.joinSession(self.getSessionId())
return nClient
def __del__(self):
"""
Calls closeSession() and ignores any exceptions.
Equivalent to close() in OmeroJava or omero::client::~client()
"""
try:
self.closeSession()
except Exception, e:
# It is perfectly normal for the session to have been closed
# before garbage collection
# though for some reason I can't match this exception with the
# Glacier2.SessionNotExistException class.
# Using str matching instead.
if 'Glacier2.SessionNotExistException' not in str(e.__class__):
self.__logger.warning(
"..Ignoring error in client.__del__:" + str(e.__class__))
def getCommunicator(self):
"""
Returns the Ice.Communicator for this instance or throws
an exception if None.
"""
self.__lock.acquire()
try:
if not self.__ic:
raise omero.ClientError(
"No Ice.Communicator active; call createSession() "
"or create a new client instance")
return self.__ic
finally:
self.__lock.release()
def getAdapter(self):
"""
Returns the Ice.ObjectAdapter for this instance or throws
an exception if None.
"""
self.__lock.acquire()
try:
if not self.__oa:
raise omero.ClientError(
"No Ice.ObjectAdapter active; call createSession() "
"or create a new client instance")
return self.__oa
finally:
self.__lock.release()
def getSession(self, blocking=True):
"""
Returns the current active session or throws an exception if none has
been created since the last closeSession()
If blocking is False, then self.__lock is not acquired and the value
of self.__sf is simply returned. Clients must properly handle the
situation where this value is None.
"""
if not blocking:
return self.__sf
self.__lock.acquire(blocking)
try:
sf = self.__sf
if not sf:
raise omero.ClientError("No session available")
return sf
finally:
self.__lock.release()
def getSessionId(self):
"""
Returns the UUID for the current session without making a remote call.
Uses getSession() internally and will throw an exception if no session
is active.
"""
return self.getSession().ice_getIdentity().name
def getCategory(self):
"""
Returns the category which should be used for all callbacks
passed to the server.
"""
return self.getRouter(self.__ic).getCategoryForClient()
def getImplicitContext(self):
"""
Returns the Ice.ImplicitContext which defines what properties
will be sent on every method invocation.
"""
return self.getCommunicator().getImplicitContext()
def getContext(self, group=None):
"""
Returns a copy of the implicit context's context, i.e.
dict(getImplicitContext().getContext()) for use as the
last argument to any remote method.
"""
ctx = self.getImplicitContext().getContext()
ctx = dict(ctx)
if group is not None:
ctx["omero.group"] = str(group)
return ctx
def getProperties(self):
"""
Returns the active properties for this instance
"""
self.__lock.acquire()
try:
return self.__ic.getProperties()
finally:
self.__lock.release()
def getProperty(self, key):
"""
Returns the property for the given key or "" if none present
"""
return self.getProperties().getProperty(key)
def getPropertyMap(self, properties=None):
"""
Returns all properties which are prefixed with "omero." or "Ice."
"""
if properties is None:
properties = self.getProperties()
rv = dict()
for prefix in ["omero", "Ice"]:
for k, v in properties.getPropertiesForPrefix(prefix).items():
rv[k] = v
return rv
def getDefaultBlockSize(self):
"""
Returns the user-configured "omero.block_size" property or
omero.constants.DEFAULTBLOCKSIZE if none is set.
"""
try:
return int(self.getProperty("omero.block_size"))
except:
return omero.constants.DEFAULTBLOCKSIZE
def joinSession(self, session):
"""
Uses the given session uuid as name
and password to rejoin a running session
"""
return self.createSession(session, session)
def createSession(self, username=None, password=None):
"""
Performs the actual logic of logging in, which is done via the
getRouter(). Disallows an extant ServiceFactoryPrx, and
tries to re-create a null Ice.Communicator. A null or empty
username will throw an exception, but an empty password is allowed.
"""
import omero
self.__lock.acquire()
try:
# Checking state
if self.__sf:
raise omero.ClientError(
"Session already active. "
"Create a new omero.client or closeSession()")
if not self.__ic:
if not self.__previous:
raise omero.ClientError(
"No previous data to recreate communicator.")
self._initData(self.__previous)
self.__previous = None
# Check the required properties
if not username:
username = self.getProperty("omero.user")
elif isinstance(username, omero.RString):
username = username.val
if not username or len(username) == 0:
raise omero.ClientError("No username specified")
if not password:
password = self.getProperty("omero.pass")
elif isinstance(password, omero.RString):
password = password.val
if not password:
raise omero.ClientError("No password specified")
# Acquire router and get the proxy
prx = None
retries = 0
while retries < 3:
reason = None
if retries > 0:
self.__logger.warning(
"%s - createSession retry: %s" % (reason, retries))
try:
ctx = self.getContext()
ctx[omero.constants.AGENT] = self.__agent
if self.__ip is not None:
ctx[omero.constants.IP] = self.__ip
rtr = self.getRouter(self.__ic)
prx = rtr.createSession(username, password, ctx)
# Create the adapter
self.__oa = self.__ic.createObjectAdapterWithRouter(
"omero.ClientCallback", rtr)
self.__oa.activate()
id = Ice.Identity()
id.name = self.__uuid
id.category = rtr.getCategoryForClient()
self.__cb = BaseClient.CallbackI(self.__ic, self.__oa, id)
self.__oa.add(self.__cb, id)
break
except omero.WrappedCreateSessionException, wrapped:
if not wrapped.concurrency:
raise wrapped # We only retry concurrency issues.
reason = "%s:%s" % (wrapped.type, wrapped.reason)
retries = retries + 1
except Ice.ConnectTimeoutException, cte:
reason = "Ice.ConnectTimeoutException:%s" % str(cte)
retries = retries + 1
if not prx:
raise omero.ClientError("Obtained null object prox")
# Check type
self.__sf = omero.api.ServiceFactoryPrx.uncheckedCast(prx)
if not self.__sf:
raise omero.ClientError(
"Obtained object proxy is not a ServiceFactory")
# Configure keep alive
self.startKeepAlive()
# Set the client callback on the session
# and pass it to icestorm
try:
raw = self.__oa.createProxy(self.__cb.id)
self.__sf.setCallback(
omero.api.ClientCallbackPrx.uncheckedCast(raw))
# self.__sf.subscribe("/public/HeartBeat", raw)
except:
self.__del__()
raise
# Set the session uuid in the implicit context
self.getImplicitContext().put(
omero.constants.SESSIONUUID, self.getSessionId())
return self.__sf
finally:
self.__lock.release()
def enableKeepAlive(self, seconds):
"""
Resets the "omero.keep_alive" property on the current
Ice.Communicator which is used on initialization to determine
the time-period between Resource checks. The __resources
instance will be created as soon as an active session is
detected.
"""
self.__lock.acquire()
try:
# A communicator must be configured!
ic = self.getCommunicator()
# Setting this here guarantees that after closeSession()
# the next createSession() will use the new value despite
# what was in the configuration file
ic.getProperties().setProperty("omero.keep_alive", str(seconds))
# If there's not a session, there should be no
# __resources but just in case since startKeepAlive
# could have been called manually.
if seconds <= 0:
self.stopKeepAlive()
else:
try:
# If there's a session, then go ahead and
# start the keep alive.
self.getSession()
self.startKeepAlive()
except omero.ClientError:
pass
finally:
self.__lock.release()
def startKeepAlive(self):
"""
        Start a new __resources instance, stopping any that currently
        exists, if omero.keep_alive is greater than 0.
"""
self.__lock.acquire()
try:
ic = self.getCommunicator()
props = ic.getProperties()
seconds = -1
try:
seconds = props.getPropertyWithDefault(
"omero.keep_alive", "-1")
seconds = int(seconds)
except ValueError:
pass
# Any existing resource should be shutdown.
if self.__resources is not None:
self.stopKeepAlive()
# If seconds is more than 0, a new one should be started.
if seconds > 0:
self.__resources = omero.util.Resources(seconds)
class Entry:
def __init__(self, c):
self.c = c
def cleanup(self):
pass
def check(self):
sf = self.c._BaseClient__sf
ic = self.c._BaseClient__ic
if sf is not None:
try:
sf.keepAlive(None)
except Exception:
if ic is not None:
ic.getLogger().warning(
"Proxy keep alive failed.")
return False
return True
self.__resources.add(Entry(self))
finally:
self.__lock.release()
def stopKeepAlive(self):
self.__lock.acquire()
try:
if self.__resources is not None:
try:
self.__resources.cleanup()
finally:
self.__resources = None
finally:
self.__lock.release()
def getManagedRepository(self):
repoMap = self.getSession().sharedResources().repositories()
prx = None
for prx in repoMap.proxies:
if not prx:
continue
prx = omero.grid.ManagedRepositoryPrx.checkedCast(prx)
if prx:
break
return prx
def getRouter(self, comm):
"""
Acquires the default router, and throws an exception
if it is not of type Glacier2.Router. Also sets the
Ice.ImplicitContext on the router proxy.
"""
prx = comm.getDefaultRouter()
if not prx:
raise omero.ClientError("No default router found.")
router = Glacier2.RouterPrx.uncheckedCast(prx)
if not router:
raise omero.ClientError("Error obtaining Glacier2 router")
        # For whatever reason, we have to set the context
        # on the router proxy here as well
router = router.ice_context(comm.getImplicitContext().getContext())
return router
def sha1(self, filename):
"""
Calculates the local sha1 for a file.
"""
try:
from hashlib import sha1 as sha_new
except ImportError:
from sha import new as sha_new
digest = sha_new()
file = open(filename, 'rb')
try:
while True:
block = file.read(1024)
if not block:
break
digest.update(block)
finally:
file.close()
return digest.hexdigest()
def upload(self, filename, name=None, path=None, type=None, ofile=None,
block_size=1024):
"""
Utility method to upload a file to the server.
"""
if not self.__sf:
raise omero.ClientError("No session. Use createSession first.")
import os
import types
if not filename or not isinstance(filename, types.StringType):
raise omero.ClientError("Non-null filename must be provided")
if not os.path.exists(filename):
raise omero.ClientError("File does not exist: " + filename)
from path import path as __path__
filepath = __path__(filename)
file = open(filename, 'rb')
try:
size = os.path.getsize(file.name)
if block_size > size:
block_size = size
if not ofile:
ofile = omero.model.OriginalFileI()
ofile.hash = omero.rtypes.rstring(self.sha1(file.name))
ofile.hasher = omero.model.ChecksumAlgorithmI()
ofile.hasher.value = omero.rtypes.rstring("SHA1-160")
abspath = filepath.normpath().abspath()
if not ofile.name:
if name:
ofile.name = omero.rtypes.rstring(name)
else:
ofile.name = omero.rtypes.rstring(str(abspath.basename()))
if not ofile.path:
ofile.path = omero.rtypes.rstring(
str(abspath.dirname())+os.path.sep)
if not ofile.mimetype:
if type:
# ofile.mimetype = 'application/octet-stream' by default
ofile.mimetype = omero.rtypes.rstring(type)
# Disabled with group permissions #1434
# if permissions:
# ofile.details.permissions = permissions
up = self.__sf.getUpdateService()
ofile = up.saveAndReturnObject(ofile)
prx = self.__sf.createRawFileStore()
try:
prx.setFileId(ofile.id.val)
prx.truncate(size) # ticket:2337
self.write_stream(file, prx, block_size)
finally:
prx.close()
finally:
file.close()
return ofile
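    # Usage sketch (assumes an active session): client.upload("data.csv",
    # type="text/csv") saves an OriginalFile and streams the local bytes
    # to the server's raw file store.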
def write_stream(self, file, prx, block_size=1024*1024):
offset = 0
while True:
block = file.read(block_size)
if not block:
break
prx.write(block, offset, len(block))
offset += len(block)
def download(self, ofile, filename=None, block_size=1024*1024,
filehandle=None):
if not self.__sf:
raise omero.ClientError("No session. Use createSession first.")
# Search for objects in all groups. See #12146
ctx = self.getContext(group=-1)
prx = self.__sf.createRawFileStore()
try:
if not ofile or not ofile.id:
raise omero.ClientError("No file to download")
ofile = self.__sf.getQueryService().get(
"OriginalFile", ofile.id.val, ctx)
if block_size > ofile.size.val:
block_size = ofile.size.val
prx.setFileId(ofile.id.val, ctx)
size = ofile.size.val
offset = 0
if filehandle is None:
if filename is None:
raise omero.ClientError(
"no filename or filehandle specified")
filehandle = open(filename, 'wb')
else:
if filename:
raise omero.ClientError(
"filename and filehandle specified.")
try:
while (offset+block_size) < size:
filehandle.write(prx.read(offset, block_size))
offset += block_size
filehandle.write(prx.read(offset, (size-offset)))
finally:
if filename:
filehandle.close()
finally:
prx.close()
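    # Usage sketch (assumes an active session and an OriginalFileI `ofile`
    # with a valid id): client.download(ofile, filename="local.bin")
    # streams the remote file to disk in block_size chunks.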
def submit(self, req, loops=10, ms=500,
failonerror=True, ctx=None, failontimeout=True):
handle = self.getSession().submit(req, ctx)
return self.waitOnCmd(
handle, loops=loops, ms=ms,
failonerror=failonerror,
failontimeout=failontimeout,
closehandle=True)
def waitOnCmd(self, handle, loops=10, ms=500,
failonerror=True,
failontimeout=False,
closehandle=False):
from omero import LockTimeout
try:
callback = omero.callbacks.CmdCallbackI(self, handle)
except:
# Since the callback won't escape this method,
# close the handle if requested.
if closehandle and handle:
handle.close()
raise
try:
callback.loop(loops, ms) # Throw LockTimeout
except LockTimeout:
if failontimeout:
callback.close(closehandle)
raise
else:
return callback
rsp = callback.getResponse()
if isinstance(rsp, omero.cmd.ERR):
if failonerror:
callback.close(closehandle)
raise omero.CmdError(rsp)
return callback
def getStatefulServices(self):
"""
Returns all active StatefulServiceInterface proxies. This can
be used to call close before calling setSecurityContext.
"""
rv = []
sf = self.sf
services = sf.activeServices()
for srv in services:
try:
prx = sf.getByName(srv)
prx = omero.api.StatefulServiceInterfacePrx.checkedCast(prx)
if prx is not None:
rv.append(prx)
except:
self.__logger.warn(
"Error looking up proxy: %s" % srv, exc_info=1)
return rv
def closeSession(self):
"""
Closes the Router connection created by createSession(). Due to a bug
in Ice, only one connection is allowed per communicator, so we also
destroy the communicator.
"""
self.__lock.acquire()
try:
try:
self.stopKeepAlive()
except Exception, e:
self.__logger.warning(
"While cleaning up resources: " + str(e))
self.__sf = None
oldOa = self.__oa
self.__oa = None
oldIc = self.__ic
self.__ic = None
# Only possible if improperly configured.
if not oldIc:
return
if oldOa:
try:
oldOa.deactivate()
except Exception, e:
self.__logger.warning(
"While deactivating adapter: " + str(e.message))
self.__previous = Ice.InitializationData()
self.__previous.properties = oldIc.getProperties().clone()
try:
try:
self.getRouter(oldIc).destroySession()
except Glacier2.SessionNotExistException:
# ok. We don't want it to exist
pass
except Ice.ConnectionLostException:
# ok. Exception will always be thrown
pass
except Ice.ConnectionRefusedException:
# ok. Server probably went down
pass
except Ice.ConnectTimeoutException:
# ok. Server probably went down
pass
# Possible other items to handle/ignore:
# * Ice.DNSException
finally:
oldIc.destroy()
del oldIc._impl # WORKAROUND ticket:2007
finally:
self.__lock.release()
def killSession(self):
"""
        Calls ISession.closeSession(omero.model.Session) until the returned
        reference count is zero. The number of invocations is returned. If
        ISession.closeSession() cannot be called, -1 is returned.
"""
s = omero.model.SessionI()
s.uuid = omero.rtypes.rstring(self.getSessionId())
try:
svc = self.sf.getSessionService()
except:
self.__logger.warning(
"Cannot get session service for killSession. "
"Using closeSession")
self.closeSession()
return -1
count = 0
try:
r = 1
while r > 0:
count += 1
r = svc.closeSession(s)
except omero.RemovedSessionException:
pass
except:
self.__logger.warning(
"Unknown exception while closing all references",
exc_info=True)
# Now the server-side session is dead, call closeSession()
self.closeSession()
return count
# Environment Methods
# ===========================================================
def _env(self, _unwrap, method, *args):
""" Helper method to access session environment"""
session = self.getSession()
if not session:
raise omero.ClientError("No session active")
u = self.getSessionId()
s = session.getSessionService()
m = getattr(s, method)
rv = apply(m, (u,)+args)
if callable(_unwrap):
rv = _unwrap(rv) # Passed in function
elif _unwrap:
rv = omero.rtypes.unwrap(rv) # Default method
return rv
def getInput(self, key, unwrap=False):
"""
Retrieves an item from the "input" shared (session) memory.
"""
return self._env(unwrap, "getInput", key)
def getOutput(self, key, unwrap=False):
"""
Retrieves an item from the "output" shared (session) memory.
"""
return self._env(unwrap, "getOutput", key)
def setInput(self, key, value):
"""
Sets an item in the "input" shared (session) memory under the given
name.
"""
self._env(False, "setInput", key, value)
def setOutput(self, key, value):
"""
Sets an item in the "output" shared (session) memory under the given
name.
"""
self._env(False, "setOutput", key, value)
def getInputKeys(self):
"""
Returns a list of keys for all items in the "input" shared (session)
memory
"""
return self._env(False, "getInputKeys")
def getOutputKeys(self):
"""
Returns a list of keys for all items in the "output" shared (session)
memory
"""
return self._env(False, "getOutputKeys")
def getInputs(self, unwrap=False):
"""
Returns all items in the "input" shared (session) memory
"""
return self._env(unwrap, "getInputs")
def getOutputs(self, unwrap=False):
"""
Returns all items in the "output" shared (session) memory
"""
return self._env(unwrap, "getOutputKeys")
#
# Misc.
#
def _optSetProp(self, id, key, default=""):
val = id.properties.getProperty(key)
if not val:
val = default
id.properties.setProperty(key, val)
def parseAndSetInt(self, data, key, newValue):
currentValue = data.properties.getProperty(key)
if not currentValue or len(currentValue) == 0:
newStr = str(newValue)
data.properties.setProperty(key, newStr)
currentValue = newStr
return currentValue
def __getattr__(self, name):
"""
Compatibility layer, which allows calls to getCommunicator() and
getSession() to be called via self.ic and self.sf
"""
if name == "ic":
return self.getCommunicator()
elif name == "sf":
return self.getSession()
elif name == "adapter":
return self.getAdapter()
else:
raise AttributeError("Unknown property: " + name)
#
# Callback
#
def onHeartbeat(self, myCallable):
self.__cb.onHeartbeat = myCallable
def onSessionClosed(self, myCallable):
self.__cb.onSessionClosed = myCallable
def onShutdownIn(self, myCallable):
self.__cb.onShutdownIn = myCallable
class CallbackI(omero.api.ClientCallback):
"""
        Implementation of ClientCallback which will be added to
any Session which this instance creates. Note: this client
should avoid all interaction with the {@link client#lock} since it
can lead to deadlocks during shutdown. See: ticket:1210
"""
#
# Default callbacks
#
def _noop(self):
pass
def _closeSession(self):
try:
self.oa.deactivate()
except Exception, e:
                sys.stderr.write("On session closed: " + str(e))
def __init__(self, ic, oa, id):
self.ic = ic
self.oa = oa
self.id = id
self.onHeartbeat = self._noop
self.onShutdownIn = self._noop
self.onSessionClosed = self._noop
def execute(self, myCallable, action):
try:
myCallable()
# self.ic.getLogger().trace("ClientCallback", action + " run")
except:
try:
self.ic.getLogger().error("Error performing %s" % action)
except:
print "Error performing %s" % action
def requestHeartbeat(self, current=None):
self.execute(self.onHeartbeat, "heartbeat")
def shutdownIn(self, milliseconds, current=None):
self.execute(self.onShutdownIn, "shutdown")
def sessionClosed(self, current=None):
self.execute(self.onSessionClosed, "sessionClosed")
| dominikl/openmicroscopy | components/tools/OmeroPy/src/omero/clients.py | Python | gpl-2.0 | 41,710 |
from django.db import models
from caya.models import Result, ResultItem
from caya.choices import RESULT_VALIDATION_STATUS
class MeditechResult(Result):
"""
Model for storing information about a specific FACS result obtained from MEDITECH.
    This model is a validation result.
"""
date_of_birth = models.DateField(
help_text = 'Patient Date of Birth'
)
u_number = models.CharField(
max_length=25,
help_text = 'Patient U Number')
authorization_name = models.CharField(
max_length=50,
verbose_name = 'Authorizing Person',
blank=True,
null=True,
)
authorization_datetime = models.DateTimeField(
help_text = 'Date when result was authorized.',
blank=True,
null=True,
)
result_datetime_parsed = models.DateTimeField(
help_text = 'Date result added to system.',
db_index=True,
)
result_datetime_ordered = models.DateTimeField(
help_text = 'Date result was ordered.',
db_index=True,
)
result_datetime_collected = models.DateTimeField(
help_text = 'Date result was collected.',
db_index=True,
)
result_datetime_received = models.DateTimeField(
help_text = 'Date result was received.',
db_index=True,
)
def authorized_by(self):
return self.authorization_name
def authorized_date(self):
return self.authorization_datetime.date()
class Meta:
app_label = 'tokafatso' | elkingtowa/caya | src/models/meditech_result.py | Python | mit | 1,591 |
"""
A set of functions for generating statistics trees.
Annotates crystalized targets and number of ligands/target available in ChEMBL.
"""
from django.db.models import Count
from interaction.models import ResidueFragmentInteraction, StructureLigandInteraction
from ligand.models import AssayExperiment, AnalyzedExperiment
from protein.models import Protein, ProteinFamily
from structure.models import Structure
import json
from collections import OrderedDict
from copy import deepcopy
class PhylogeneticTreeNode(object):
def __init__(self, name='', color=''):
self.name = name
self.color = color
self.children = OrderedDict()
self.exp_data = {
'crystals': 0,
'mutations': 0,
'ligands': 0,
'ligand_bias': 0,
}
def get_value(self, param):
"""
Function returning a parameter for coloring tree leaves.
@param: a parameter based on which a color value will be set.
TODO: Implement a scheme for mutations.
"""
try:
return self.exp_data[param]
except KeyError:
return 0
def increment_value(self, param, value=1):
"""
        Increments one of the leaf's experimental-data counters by the given
        value.
        @param: the counter to increment (e.g. 'crystals' or 'ligands').
"""
self.exp_data[param] += value
def update_exp_data(self, data):
for key, value in data.items():
if self.exp_data[key] > value: continue
self.exp_data[key] = value
def get_nodes_dict(self, param):
if param == None:
return OrderedDict([
('name', self.name),
('value', 3000),
('color', self.color),
('children', [
y.get_nodes_dict('crystals') for x,y in self.children.items() if self.children != OrderedDict()
]),
])
else:
return OrderedDict([
('name', self.name),
('value', self.get_value(param)),
('color', self.color),
('children', [
y.get_nodes_dict(param) for x,y in self.children.items() if self.children != OrderedDict()
]),
])
class PhylogeneticTree(object):
def __init__(self, root_lvl, depth, family):
self.tree = PhylogeneticTreeNode()
def add_data(self, path, data):
parent_path = path.split('_')[1:-1]
tmp = self.tree.children
tmp_path = [path.split('_')[0]]
while parent_path != []:
tmp_path.append(parent_path.pop(0))
try:
tmp = tmp['_'.join(tmp_path)].children
except KeyError:
tmp['_'.join(tmp_path)] = data
try:
tmp[path].update_exp_data(data.exp_data)
except KeyError:
tmp[path] = data
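    # Example (illustrative): for the path "001_002_003" the node ends up
    # at self.tree.children["001_002"].children["001_002_003"]; the walk
    # starts from the two-segment slug kept in the root's children.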
def get_data(self, path):
parent_path = path.split('_')[1:-1]
tmp = self.tree.children
tmp_path = [path.split('_')[0]]
while parent_path != []:
tmp_path.append(parent_path.pop(0))
try:
tmp = tmp['_'.join(tmp_path)].children
except KeyError:
print("You're screwed")
try:
print(path)
print(tmp[path].name)
print(tmp[path].exp_data)
except:
pass
def get_nodes(self, level):
ref = self.tree.children
tmp = OrderedDict()
while level:
for child in ref.items():
for grandchild in child[1].children.items():
tmp.update({grandchild[0]: grandchild[1]})
ref = tmp
tmp = OrderedDict()
level = level -1
return ref
def get_nodes_dict(self, param):
return self.tree.get_nodes_dict(param)
class PhylogeneticTreeGenerator(object):
#TODO: This should go to settings as it is GPCR-specific.
#Dict keys are the Class - Protein family pairs. '' means 'any'.
#CSS_COLORS = {
# ("Class F (Frizzled)", '') : 'SteelBlue',
# ('Class A (Rhodopsin)', 'Protein') : 'SteelBlue',
# ('Class A (Rhodopsin)', 'Alicarboxylic acid') : 'Red',
# ('Class B2 (Adhesion)', '') : 'SteelBlue',
# ('Class A (Rhodopsin)', 'Peptide') : 'SkyBlue',
# ('Class B1 (Secretin)', '') : 'SkyBlue',
# ('Class A (Rhodopsin)', 'Lipid') : 'LightGreen',
# ('', 'Orphan') : 'Orange',
# ('Class A (Rhodopsin)', 'Sensory') : 'DarkGray',
# ('Class C (Glutamate)', 'Sensory') : 'DarkGray',
# ('Taste 2', '') : 'DarkGray',
# ('Class A (Rhodopsin)', 'Nucleotide') : 'Purple',
# }
CSS_COLORS = {
("", "Adhesion receptors") : 'Crimson',
("", "Alicarboxylic acid receptors") : 'Red',
("", "Aminergic receptors") : 'OrangeRed',
("", "Amino acid receptors") : 'Orange',
("", "Ion receptors") : 'GoldenRod',
("", "Lipid receptors") : 'Gold',
("", "Melatonin receptors") : 'Yellow',
("", "Nucleotide receptors") : 'YellowGreen',
("", "Orphan receptors") : 'Gold',
("", "Other") : 'Green',
("", "Peptide receptors") : 'SkyBlue',
("", "Protein receptors") : 'SteelBlue',
("", "Sensory receptors") : 'Indigo',
("", "Steroid receptors") : 'Purple',
("Class B2 (Adhesion)", "") : 'LimeGreen',
}
#List of tree levels that should be sorted alphabetically
SORTED_BRANCHES = [2,3]
# o Dark blue: class A Protein ligand type and whole of classes Adhesion and class F (they also have this ligand type)
# o Light blue: Peptide and whole of class B1 (it also has this ligand type)
# o Green: Lipid receptors
# o Orange: Orphan receptors
# o Dark grey: Sensory (class A opsins, class C Taste1 and whole of class Taste2)
# o Purple: Nucleotide (class A P2Y and adenosine)
# o Black: All other
# http://www.d3noob.org/2014/01/tree-diagrams-in-d3js_11.html
def __init__(self, root_lvl=1, depth=3):
self.root_lvl = root_lvl
self.tree_depth = depth
self.lookup = { x: {} for x in range(self.tree_depth+1)}
self.aux_data = {
'crystals': [],
'mutations': [],
'ligands': {},
'ligand_bias': {}
}
self.get_aux_data()
self.d3_options = {
'depth': self.tree_depth,
'branch_length': {},
'branch_trunc': 0,
'leaf_offset': 30
}
self.families = ProteinFamily.objects.all().prefetch_related('parent')
for family in self.families:
if family.slug == '000':
self.lookup[0]['000'] = family
continue
tree_lvl = len(family.slug.split('_'))
if tree_lvl > self.tree_depth:
continue
if family.slug == '005_001_002':
continue
self.lookup[tree_lvl][family.slug] = family
self.color_mapping = {}
self.map_family_colors()
self.proteins = Protein.objects.filter(
family__slug__startswith="00",
source__name='SWISSPROT'
).prefetch_related(
'family',
'family__parent'
).order_by('family__slug', 'species_id') #should fix CXCR4
self.proteins_index = {}
for p in self.proteins:
path = p.family.parent.slug
if not path in self.proteins_index:
self.proteins_index[path] = []
self.proteins_index[path].append(p)
def get_aux_data(self):
        self.aux_data['crystals'] = [
            x.protein_conformation.protein.parent.id for x in
            Structure.objects.all()
            .distinct('protein_conformation__protein__parent')
            .prefetch_related('protein_conformation__protein__parent')
        ]
ligand_data = AssayExperiment.objects.values(
'protein',
'protein__entry_name'
).annotate(num_ligands=Count('ligand', distinct=True))
self.aux_data['ligands'] = {
100 : [x['protein'] for x in ligand_data if x['num_ligands'] <= 100],
500 : [x['protein'] for x in ligand_data if 100 < x['num_ligands'] <= 500],
1000 : [x['protein'] for x in ligand_data if 500 < x['num_ligands'] <= 1000],
2000 : [x['protein'] for x in ligand_data if x['num_ligands'] > 1000] #more than 1000
}
ligand_bias_data = AnalyzedExperiment.objects.values(
'receptor',
'receptor__entry_name'
).annotate(num_ligands=Count('ligand_id', distinct=True))
self.aux_data['ligand_bias'] = {
10 : [x['receptor'] for x in ligand_bias_data if x['num_ligands'] <= 10],
20 : [x['receptor'] for x in ligand_bias_data if 10 < x['num_ligands'] <= 20],
30 : [x['receptor'] for x in ligand_bias_data if 20 < x['num_ligands'] <= 30],
            40 : [x['receptor'] for x in ligand_bias_data if x['num_ligands'] > 30] # more than 30
}
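        # The buckets above map an upper bound to lists of protein ids,
        # e.g. a receptor with 250 distinct ligands lands in
        # self.aux_data['ligands'][500].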
def map_family_colors(self):
for x,y in self.CSS_COLORS.items():
lvl1_slug = [slug for slug, fam in self.lookup[1].items() if (x[0] == fam.name or x[0] == '')]
lvl2_slug = []
for slug, fam in self.lookup[2].items():
if fam.name.startswith(x[1]) and slug[:3] in lvl1_slug:
self.color_mapping[slug] = y
def get_color(self, slug):
try:
return self.color_mapping[slug[:7]]
except KeyError:
return 'Black'
def get_tree_data(self, family):
"""
        Prepare data for the coverage diagram. Iterative approach.
"""
self.d3_options['branch_length'] = {}
coverage = PhylogeneticTree(self.root_lvl, self.tree_depth, family)
for lvl in range(self.root_lvl, self.tree_depth+1):
if lvl+1 not in self.d3_options['branch_length']:
self.d3_options['branch_length'][lvl] = ''
if lvl == self.tree_depth:
for path, branch in coverage.get_nodes(lvl-2).items():
tmp_prots = self.proteins_index[path]
for protein in tmp_prots:
tmp_node = PhylogeneticTreeNode(
protein.entry_name.split("_")[0],
self.get_color(protein.family.slug)
)
if protein.id in self.aux_data['crystals']:
tmp_node.increment_value('crystals')
for key in self.aux_data['ligands']:
if protein.id in self.aux_data['ligands'][key]:
tmp_node.increment_value('ligands', key)
for key in self.aux_data['ligand_bias']:
if protein.id in self.aux_data['ligand_bias'][key]:
tmp_node.increment_value('ligand_bias', key)
coverage.add_data(protein.family.slug, tmp_node)
return coverage
children = OrderedDict()
if lvl+1 in self.SORTED_BRANCHES:
for slug, node in sorted(self.lookup[lvl+1].items(), key=lambda y: y[1].name.lower()):
if node.parent.slug.startswith(family.slug):
name = node.name.replace('receptors','').replace('<sub>',' ').replace('</sub>','').strip()
children[slug] = PhylogeneticTreeNode(name, self.get_color(node.slug))
if len(name) > len(self.d3_options['branch_length'][lvl]):
self.d3_options['branch_length'][lvl] = name
else:
for slug, node in self.lookup[lvl+1].items():
if node.parent.slug.startswith(family.slug):
name = node.name.replace('receptors','').replace('<sub>',' ').replace('</sub>','').strip()
children[slug] = PhylogeneticTreeNode(name, self.get_color(node.slug))
if len(name) > len(self.d3_options['branch_length'][lvl]):
self.d3_options['branch_length'][lvl] = name
for path, data in children.items():
coverage.add_data(path, data)
return coverage
def get_coverage_tree(self, family, coverage=PhylogeneticTreeNode()):
"""
Prepare data for coverage diagram.
"""
print('\n'.join([x[1].name for x in coverage.children.items()]))
tmp_root = len(family.slug.split('_'))
if tmp_root < self.root_lvl:
return
if tmp_root == self.tree_depth:
tmp_prots = self.proteins.filter(family__parent=family)
tmp_crystals = self.crystal_proteins.filter(family__parent=family)
for protein in tmp_prots:
if tmp_root - len(protein.family.slug.split('_')) == 1:
tmp_node = PhylogeneticTreeNode(protein.entry_name.split("_")[0])
if self.crystal_proteins.filter(id=protein.id):
tmp_node.increment_value("crystalized")
coverage.children[protein.family.parent.slug].children[protein.family] = deepcopy(tmp_node)
return coverage
if tmp_root+1 in self.SORTED_BRANCHES:
coverage.children = OrderedDict((x[0], PhylogeneticTreeNode(x[1].name))
for x in sorted(
self.lookup[tmp_root+1].items(),
key=lambda y: y[1].name.lower())
if x[1].parent == family
)
else:
coverage.children = OrderedDict({x: PhylogeneticTreeNode(self.lookup[tmp_root+1][x].name)
for x in self.lookup[tmp_root+1]
if self.lookup[tmp_root+1][x].parent == family
})
for slug, branch in coverage.children.items():
branch.children = self.get_coverage_tree(self.families.get(slug=slug), deepcopy(coverage)).children
return coverage
| protwis/protwis | common/phylogenetic_tree.py | Python | apache-2.0 | 14,686 |
"""Convenience module for scripting PyDev Quick Assist proposals in Jyton.
USAGE
=====
Create pyedit_*.py file in your jython script dir of choice, import this
module, subclass AssistProposal, instantiate it and register the instance
with Pydev.
Example:
-------------------------------------------------------------
from assist_proposal import AssistProposal, register_proposal
class MyProposal(AssistProposal):
implementation_goes_here
register_proposal(MyProposal())
-------------------------------------------------------------
The cmd variable is provided automatically by pydev and will be a string
such as 'onSave' or 'onCreateActions' etc...
See docs in source for further details.
"""
__author__ = """Joel Hedlund <joel.hedlund at gmail.com>
Some ideas borrowed from Fabio Zadrozny. These cases are explicitly noted
in the relevant code docs.
"""
__version__ = "1.0.0"
__copyright__ = """Available under the same conditions as PyDev.
See PyDev license for details.
http://pydev.sourceforge.net
"""
from org.python.pydev.editor.correctionassist import \
IAssistProps # @UnresolvedImport
class AssistProposal:
"""Convenience class for adding assist proposals to pydev.
This class does nothing useful. Subclasses should assign proper values
to data members and provide sane implementations for methods.
Class data members
==================
description: <str>
The text displayed to the user in the quick assist menu (Ctrl-1).
tag: <str>
Unique descriptive identifier for the assist.
"""
description = "Remember to change this description"
tag = "REMEMBER_TO_CHANGE_THIS_TAG"
def isValid(self, selection, current_line, editor, offset):
"""Return True if the proposal is applicable, False otherwise.
This method should provide the same interface as the method with
the same name in IAssistProps.
If desirable, subclasses may store the isValid args as instance
data members for use with .apply().
IN:
        selection: <PySelection>
The current selection. Highly useful.
current_line: <str>
The text on the current line.
editor: <PyEdit>
The current editor.
offset: <int>
The current position in the editor.
OUT:
Boolean. Is the proposal applicable in the current situation?
"""
return False
def apply(self, document):
"""Do what the assist is supposed to do when activated.
        This method should provide the same interface as the method with
        the same name in PyCompletionProposal.
See also docs for the .isValid() method. You might like to use data
from there.
IN:
document: <IDocument>
The edited document.
OUT:
None.
"""
def register_proposal(proposal, debug=False):
"""Register the proposal with the quick assistant.
IN:
    proposal: <AssistProposal>
        The object that holds all relevant information and does all the
        necessary work for the proposal.
    debug = False: <bool>
If False (default), we will not attempt to re-register the assist
proposal if an assist proposal with the same tag is already
registered. If True, then we will override the registered proposal
with our own. This is mainly useful for debugging.
OUT:
None.
"""
from org.python.pydev.editor.correctionassist import PythonCorrectionProcessor #@UnresolvedImport
bTagInUse = PythonCorrectionProcessor.hasAdditionalAssist(proposal.tag)
if debug or not bTagInUse:
oInterface = AssistantInterface(proposal)
PythonCorrectionProcessor.addAdditionalAssist(proposal.tag, oInterface)
class AssistantInterface(IAssistProps):
"""Assistant interface wrapper for AssistProposal instances.
The Quick Assistant will ask this class if we can apply the proposal,
and if so, which properties does it have?
    Adapted from Fabio Zadrozny's AssistAssignParamsToAttributes class in
assign_params_to_attributes_assist.py.
Instance data members
=====================
    proposal: <AssistProposal>
The object that holds all relevant information and does all the
necessary work for the proposal.
"""
def __init__(self, proposal, *args):
"""A new Assistant Interface.
IN:
            proposal: <AssistProposal>
"""
self.proposal = proposal
def getImage(self, imageCache, c):
if imageCache is not None:
return imageCache.get(c)
return None
def isValid(self, ps, sel, editor, offset):
"""java: boolean isValid(PySelection ps, String sel, PyEdit edit, int offset);
"""
return self.proposal.isValid(ps, sel, editor, offset)
def getProps(self, ps, imageCache, f, nature, editor, offset):
'''java: List<ICompletionProposal> getProps(PySelection ps, ImageCache imageCache, File f,
IPythonNature nature, PyEdit edit, int offset)
'''
from java.util import ArrayList #@UnresolvedImport
IPyCompletionProposal = editor.getIPyCompletionProposalClass() #@UnresolvedImport
PyCompletionProposal = editor.getPyCompletionProposalClass() #@UnresolvedImport
UIConstants = editor.getUIConstantsClass() #@UnresolvedImport
class Prop(PyCompletionProposal):
"""This is the proposal that Ctrl+1 will require.
            Adapted from Fabio Zadrozny's Prop class in
assign_params_to_attributes_assist.py.
Instance data members
=====================
proposal: <AssistantProposal>
The object that holds all relevant information and does all the
necessary work for the proposal.
"""
def __init__(self, proposal, *args):
PyCompletionProposal.__init__(self, *args)
self.proposal = proposal
def apply(self, document):
"""java: public void apply(IDocument document)
"""
self.proposal.apply(document)
def getSelection(self, document):
return None
oProp = Prop(self.proposal,
'', 0, 0, 0,
self.getImage(imageCache, UIConstants.ASSIST_DOCSTRING),
self.proposal.description,
None, None,
IPyCompletionProposal.PRIORITY_DEFAULT)
l = ArrayList()
l.add(oProp)
return l
| akurtakov/Pydev | plugins/org.python.pydev.jython/jysrc/assist_proposal.py | Python | epl-1.0 | 6,835 |
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
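# A sketch of the matching settings.py entry (priorities are illustrative):
# ITEM_PIPELINES = {
#     'dianping.pipelines.DianpingPipeline': 300,
#     'dianping.pipelines.DuplicatesPipeline': 400,
# }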
from scrapy.exceptions import DropItem
import codecs
import json
class DianpingPipeline(object):
def __init__(self):
self.file = codecs.open('shop_data.json', mode='wb', encoding='utf-8')
def process_item(self, item, spider):
t = {}
for k, v in dict(item).items():
v = ' '.join(v).strip('\r\n').strip('\n').strip().replace('\r\n', ' ').replace('\n', ' ')
t[k] = v
line = json.dumps(t) + '\n'
self.file.write(line.decode("unicode_escape"))
return item
def open_spider(self,spider):
print '====================== OPEN SPIDER ======================='
def close_spider(self,spider):
print '====================== CLOSE SPIDER ======================='
class FileterPipeline(object):
words_to_filter = ['politics', 'religion']
    def process_item(self, item, spider):
        # Drop the item as soon as any forbidden word appears in the
        # description; otherwise let it pass through.
        for word in self.words_to_filter:
            if word in unicode(item['description']).lower():
                raise DropItem("Contains forbidden word: %s" % word)
        return item
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['id'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['id'])
return item
class JsonWriterPipeline(object):
def __init__(self):
self.file = open('items.jl', 'wb')
def process_item(self, item, spider):
import json
line = json.dumps(dict(item)) + "\n"
self.file.write(line)
return item | MircroCode/dpSpider | dianping/dianping/pipelines.py | Python | mit | 1,850 |
import cherrypy
# import datetime
#import pandas as pd
from cStringIO import StringIO
from mufflerDataBackEnd import MufflerVPRDataBackEnd
from mufflerDataBackEnd import MufflerVPRPlotLoader as plotLoader
class MufflerVPR(object):
exposed = True
def __init__(self, dataService):
self.dataService = dataService
@cherrypy.expose
def index(self):
return open('muffler/public/index.html')
@cherrypy.expose
def picture(self):
return ''' <img src="image.png" width="640" height="480" border="0" /> '''
@cherrypy.expose
@cherrypy.tools.json_out()
    def generate(self, **params):
        # Return the backing data as a dict so that json_out can serialize it.
        return self.dataService.dataBN.getDFDictData()
@cherrypy.expose
def image_png(self, **params):
img = StringIO()
plotLoader().generateVPRPlot(self.dataService.getData(), img)
img.seek(0)
return cherrypy.lib.static.serve_fileobj(img,
content_type="png",
name="image.png")
# def plot(self, image):
# x = np.linspace(0, 10)
# y = np.sin(x)
# plt.clf()
# plt.plot(x, y)
# plt.savefig(image, format='png')
class MufflerVPRWebService(object):
exposed = True
def __init__(self, file_name):
print 'Initializing MufflerVPRWebService --------------------------------------------'
self.dataBN = MufflerVPRDataBackEnd(file_name)
def getData(self):
return self.dataBN.getDFData()
# @cherrypy.tools.accept(media='text/plain')
@cherrypy.tools.json_out()
def GET(self, **params):
# return cherrypy.session['mystring']
# self.data[1]['text'] = "well..." + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
return self.dataBN.getDFDictData() # Output dict data in json format
@cherrypy.tools.json_out()
def POST(self, label='label1', length='100', width='100', height='100', power='100'):
# some_string = ''.join(random.sample(string.hexdigits, int(length)))
if self.dataBN.getLength() < 42:
self.dataBN.addDFDataRow(label, length, width, height, power)
else:
self.dataBN.loadData()
self.dataBN.addDFDataRow(label, length, width, height, power)
# cherrypy.session['mystring'] = some_string
return self.dataBN.getDFDictData() # Output dict data in json format
def PUT(self, another_string):
cherrypy.session['mystring'] = another_string
def DELETE(self):
cherrypy.session.pop('mystring', None)
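# A minimal sketch of wiring these classes together (the filename and mount
# points are illustrative, not taken from this repo); the REST service class
# is designed for CherryPy's MethodDispatcher:
#
# if __name__ == '__main__':
#     service = MufflerVPRWebService('muffler_data.csv')
#     conf = {'/service': {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}}
#     cherrypy.quickstart(MufflerVPR(service), '/', conf)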
| liufuyang/CS50_final_project | mufflerVPR/muffler.py | Python | mit | 2,568 |
# Copyright (C) 2012 David Rusk
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Unit tests for the loader module.
@author: drusk
"""
import unittest
from hamcrest import assert_that
from pml.data.loader import load
from test import base_tests
from test.matchers.pandas_matchers import equals_series
class LoaderTest(base_tests.BaseFileLoadingTest):
def test_load_csv(self):
data_set = load(self.relative_to_base("datasets/3f_header.csv"),
has_ids=False)
self.assertEqual(data_set.num_features(), 3)
self.assertEqual(data_set.num_samples(), 4)
def test_load_csv_no_header(self):
data_set = load(self.relative_to_base("datasets/3f_no_header.csv"),
has_header=False, has_ids=False)
self.assertEqual(data_set.num_features(), 3)
self.assertEqual(data_set.num_samples(), 4)
def test_load_tsv(self):
data_set = load(self.relative_to_base("datasets/3f_header.tsv"),
delimiter="\t", has_ids=False)
self.assertEqual(data_set.num_features(), 3)
self.assertEqual(data_set.num_samples(), 4)
def test_load_has_ids(self):
dataset = load(self.relative_to_base("datasets/3f_ids_header.csv"))
self.assertEqual(dataset.num_features(), 3)
self.assertEqual(dataset.num_samples(), 4)
def test_load_labelled(self):
dataset = load(self.relative_to_base("datasets/3f_ids_header.csv"))
self.assertTrue(dataset.is_labelled())
labels = dataset.get_labels()
assert_that(labels, equals_series({"V01": "c", "V02": "b", "V03": "b",
"V04": "a"}))
def test_load_unlabelled(self):
dataset = load(self.relative_to_base("datasets/"
"3f_ids_header_no_labels.csv"),
has_labels=False)
self.assertFalse(dataset.is_labelled())
self.assertEqual(dataset.num_features(), 3)
self.assertTrue(dataset.get_labels() is None)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| drusk/pml | test/test_pml/test_data/test_loader.py | Python | mit | 3,218 |
import pytest
from datetime import datetime, timedelta, timezone
from test.testexception import AuthorizationError, UnprocessableError
class TestEvent:
def test_system_errors(self, helper):
user, device = helper.given_new_user_with_device(self, "error_maker")
new_event_name = "systemError"
unit_name = "coffeeMaker"
lines = ["Failure due to {} existing with status 12".format("barrys v2500 is out of milk")]
lines2 = ["Failure due to {} existing with status 12".format("franks rocket has no internet")]
lines3 = ["Failure due to {} existing with status 12".format("franks rocket is out of milk")]
details = {"version": 1, "unitName": unit_name, "activeState": "failed"}
details["logs"] = lines
        device.record_event(new_event_name, details)
        details["logs"] = lines2
        device.record_event(new_event_name, details)
        details["logs"] = lines3
        device.record_event(new_event_name, details)
errors = user.can_see_event_errors()
assert unit_name in errors
service = errors[unit_name]
assert len(service["errors"]) == 2
assert set(len(error["similar"]) for error in service["errors"]) == set([1, 2])
def test_cannot_create_event_type_with_funny_chars(self, helper):
not_doer = helper.given_new_device(self, "The dont doer")
with pytest.raises(UnprocessableError):
not_doer.record_event("Type with spaces", {"lure-id": "possum_screech"})
with pytest.raises(UnprocessableError):
not_doer.record_event("Type_with_underscores", {"lure-id": "possum_screech"})
with pytest.raises(UnprocessableError):
not_doer.record_event("Type with $", {"lure-id": "possum_screech"})
def test_can_create_new_event(self, helper):
doer = helper.given_new_device(self, "The Do-er")
new_event_name = "E-" + helper.random_id()
screech = doer.record_event(new_event_name, {"lure-id": "possum_screech"})
screech2 = doer.record_event(new_event_name, {"lure-id": "possum_screech"})
print("Then these events with the same details should use the same eventDetailId.")
assert screech == screech2, "And events with the same details should use the same eventDetailId"
howl = doer.record_event(new_event_name, {"lure-id": "possum_howl"})
print("Then the events with some different details should have different eventDetailIds.")
assert screech != howl, "Events with different details should link to different eventDetailIds"
no_lure_id = doer.record_event(new_event_name, "")
print("Then the event with no details should should have a different eventDetailId.")
assert screech != no_lure_id, "Events with no details should link to different eventDetailId."
def test_can_upload_event_for_device(self, helper):
data_collector, device = helper.given_new_user_with_device(self, "data_collector")
# check there are no events on this device
data_collector.cannot_see_events()
print(" and data_collector uploads a event on behalf of the device")
eventid = data_collector.record_event(device, "test", {"foo": "bar"})
print("Then 'data_collector' should be able to see that the device has an event")
assert len(data_collector.can_see_events()) == 1
print("And super users should be able to see that the device has an event")
assert len(helper.admin_user().can_see_events(device)) == 1
print("But a new user shouldn't see any device events")
user_without_device = helper.given_new_user(self, "grant")
user_without_device.cannot_see_events()
print("And should not be able to upload events for a device")
with pytest.raises(AuthorizationError):
user_without_device.record_event(device, "test2", {"foo2": "bar2"})
def test_devices_share_events(self, helper):
shaker = helper.given_new_device(self, "The Shaker")
new_event_name = "E-" + helper.random_id()
sameDetails = shaker.record_event(new_event_name, {"lure-id": "possum_screech"})
print(" and ", end="")
actioner = helper.given_new_device(self, "Actioner")
sameDetailsDifferentDevice = actioner.record_event(new_event_name, {"lure-id": "possum_screech"})
print("Then the devices should share the same eventDetailId.")
assert (
sameDetails == sameDetailsDifferentDevice
), "EventsDetails should be able to be linked to from different devices"
def test_can_get_events(self, helper):
fred, freds_device = helper.given_new_user_with_device(self, "freddie")
# check there are no events on this device
fred.cannot_see_events()
freds_device.record_event("playLure", {"lure-id": "possum_screech"})
print("Then 'freddie' should be able to see that the device has an event")
assert len(fred.can_see_events()) == 1
print("And super users should be able to see that the device has an event")
assert len(helper.admin_user().can_see_events(freds_device)) == 1
print("But a new user shouldn't see any device events")
helper.given_new_user(self, "grant").cannot_see_events()
def test_should_be_able_to_upload_several_events_at_same_time(self, helper):
rocker = helper.given_new_device(self, "The Rocker")
detailId = rocker.record_event("playLure", {"lure-id": "possum_screecher"})
rocker.record_three_events_at_once(detailId)
print("And super users should be able to see get all four events for the device")
assert len(helper.admin_user().can_see_events(rocker)) == 4
def test_get_event_attributes_returned(self, helper):
boombox = helper.given_new_device(self, "boombox")
description = "E_" + helper.random_id()
boombox.record_event("audio-bait-played", {"lure-id": "possum_screams", "description": description})
event = helper.admin_user().can_see_events(boombox)[0]
print("Then get events returns an event")
print(" with DeviceId = '{}'".format(boombox.get_id()))
assert event["DeviceId"] == boombox.get_id()
print(" and EventDetail.type = 'audio-bait-played'")
assert event["EventDetail"]["type"] == "audio-bait-played"
print(" and EventDetail.details.lure-id = 'possum_screems'")
assert event["EventDetail"]["details"]["lure-id"] == "possum_screams"
print(" and EventDetail.details.description = '{}'".format(description))
assert event["EventDetail"]["details"]["description"] == description
def test_time_filtering(self, helper):
fred, freds_device = helper.given_new_user_with_device(self, "freddie")
now = datetime.now(tz=timezone.utc)
freds_device.record_event("playLure", {"lure-id": "possum_screech"}, [now])
assert len(fred.can_see_events()) == 1
sec = timedelta(seconds=1)
# Window which covers event
assert fred.can_see_events(startTime=now - sec, endTime=now + sec)
# Window which doesn't cover event.
assert not fred.can_see_events(startTime=now - (2 * sec), endTime=now - sec)
# Just end time, before the event
assert not fred.can_see_events(endTime=now - sec)
# Just end time, after the event
assert fred.can_see_events(endTime=now + sec)
# Just start time, after the event
assert not fred.can_see_events(startTime=now + sec)
# Just start time, on the event
assert fred.can_see_events(startTime=now)
def test_event_filtering(self, helper):
georgina, georgina_device = helper.given_new_user_with_device(self, "georgina")
georgina_device.record_event("play-Lure", {"lure-id": "possum_screech"})
georgina_device.record_event("software", {"recorder": "v1.3"})
assert len(georgina.can_see_events()) == 2
assert georgina.gets_first_event(type="software")["type"] == "software"
assert georgina.gets_first_event(type="play-Lure")["type"] == "play-Lure"
def test_latest_first(self, helper):
lily, lily_device = helper.given_new_user_with_device(self, "lily")
lily_device.record_event("first-event", {"lure-id": "possum_screech"})
lily_device.record_event("second-event", {"recorder": "v1.3"})
assert "first-event" == lily.gets_first_event()["type"]
assert "second-event" == lily.gets_first_event(latest="true")["type"]
| TheCacophonyProject/Full_Noise | test/test_event.py | Python | agpl-3.0 | 8,627 |
# -*- coding: utf-8 -*-
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
__author__ = "Ole Christian Weidner"
__copyright__ = "Copyright 2011-2012, Ole Christian Weidner"
__license__ = "MIT"
import bliss.saga
from bliss.saga.Object import Object
class File(Object):
'''Loosely represents a SAGA file as defined in GFD.90
The saga.filesystem.File class represents, as the name indicates,
a file on some (local or remote) filesystem. That class offers
a number of operations on that file, such as copy, move and remove::
# get a file handle
file = saga.filesystem.File("sftp://localhost/tmp/data/data.bin")
# copy the file
file.copy ("sftp://localhost/tmp/data/data.bak")
# move the file
file.move ("sftp://localhost/tmp/data/data.new")
'''
######################################################################
##
def __init__(self, url, flags=None, session=None):
'''Construct a new file object
@param url: Url of the (remote) file
@type url: L{Url}
The specified file is expected to exist -- otherwise a DoesNotExist
exception is raised. Also, the URL must point to a file (not to
a directory), otherwise a BadParameter exception is raised.
Example::
# get a file handle
file = saga.filesystem.File("sftp://localhost/tmp/data/data.bin")
# print the file's size
print file.get_size ()
'''
Object.__init__(self, session=session)
self._apitype = 'saga.filesystem'
if type(url) == str:
self._url = bliss.saga.Url(str(url))
elif type(url) == bliss.saga.Url:
self._url = url
else:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"File constructor expects str or bliss.saga.Url type as 'url' parameter")
if flags is None:
_flags = 0
else:
_flags = flags
self._plugin = Object._get_plugin(self) # throws 'NoSuccess' on error
        self._plugin.register_file_object(self, _flags)  # use the normalized flags
self._logger.info("Bound to plugin %s" % (repr(self._plugin)))
######################################################################
##
def __del__(self):
'''Delete the file object in a civilised fashion.
'''
if self._plugin is not None:
self._plugin.unregister_file_object(self)
else:
pass # can't throw here
######################################################################
##
def get_url(self):
'''Return the complete url pointing to the file.
The call will return the complete url pointing to
this file as a saga.Url object::
# print URL of a file
file = saga.filesystem.File("sftp://localhost/etc/passwd")
print file.get_url()
'''
if self._plugin is None:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"Object not bound to a plugin")
else:
return self._url
######################################################################
##
def copy(self, target, flags=None):
'''Copy the file to another location
@param target: Url of the copy target.
@param flags: Flags to use for the operation.
The file is copied to the given target location. The target URL must
be an absolute path, and can be a target file name or target
directory name. If the target file exists, it is overwritten::
# copy a file
            file = saga.filesystem.File("sftp://localhost/tmp/data/data.bin")
file.copy ("sftp://localhost/tmp/data/data.bak")
'''
if self._plugin is None:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"Object not bound to a plugin")
else:
if type(target) == str:
_target_url = bliss.saga.Url(str(target))
elif type(target) == bliss.saga.Url:
_target_url = target
else:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"File.copy() expects str or bliss.saga.Url type as 'target' parameter")
if flags is None:
_flags = 0
else:
_flags = flags
return self._plugin.file_copy(self, _target_url, _flags)
######################################################################
##
def move(self, target, flags=None):
'''Move the file to another location
@param target: Url of the move target.
@param flags: Flags to use for the operation.
        The file is moved to the given target location. The target URL must
        be an absolute path, and can be a target file name or target
        directory name. If the target file exists, it is overwritten::
            # move a file
            file = saga.filesystem.File("sftp://localhost/tmp/data/data.bin")
file.move ("sftp://localhost/tmp/data/data.bak")
'''
if self._plugin is None:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"Object not bound to a plugin")
else:
if type(target) == str:
_target_url = bliss.saga.Url(str(target))
elif type(target) == bliss.saga.Url:
_target_url = target
else:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"File.move() expects str or bliss.saga.Url type as 'target' parameter")
if flags is None:
_flags = 0
else:
_flags = flags
return self._plugin.file_move(self, _target_url, _flags)
######################################################################
##
def remove(self):
'''Delete the file '''
if self._plugin is None:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"Object not bound to a plugin")
else:
return self._plugin.file_remove(self)
######################################################################
##
def get_size(self):
'''Returns the size of a file (in bytes)
Example::
# get a file handle
file = saga.filesystem.File("sftp://localhost/tmp/data/data.bin")
# print the file's size
print file.get_size ()
'''
if self._plugin is None:
raise bliss.saga.Exception(bliss.saga.Error.NoSuccess,
"Object not bound to a plugin")
else:
return self._plugin.file_get_size(self)
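# A short usage sketch of the File class (host and paths are illustrative):
#
#   f = File("sftp://localhost/tmp/data/data.bin")
#   print f.get_size()
#   f.copy("sftp://localhost/tmp/data/data.bak")
#   f.remove()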
| saga-project/bliss | bliss/saga/filesystem/File.py | Python | mit | 6,940 |
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from ilastik.applets.featureSelection import FeatureSelectionApplet
from opIIBoostFeatureSelection import OpIIBoostFeatureSelection
class IIBoostFeatureSelectionApplet( FeatureSelectionApplet ):
"""
This applet is a subclass of the standard feature selection applet from the pixel classification workflow,
except it uses a variant of the top-level operator which adds channels needed for the IIBoost classifier.
"""
def __init__( self, workflow, guiName, projectFileGroupName, filter_implementation='Original' ):
        # Forward the caller's choice instead of hard-coding 'Original'.
        super(IIBoostFeatureSelectionApplet, self).__init__(workflow, guiName, projectFileGroupName, filter_implementation=filter_implementation)
@property
def singleLaneOperatorClass(self):
return OpIIBoostFeatureSelection
| nielsbuwen/ilastik | ilastik/applets/iiboostFeatureSelection/iiboostFeatureSelectionApplet.py | Python | gpl-3.0 | 1,704 |
# -*- coding: utf-8 -*-
#
# javauserguide documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 21 21:46:23 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'javauserguide'
copyright = u'2015, Patrick Baird'
author = u'Patrick Baird'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'V0.1'
# The full version, including alpha/beta/rc tags.
release = 'V0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'javauserguidedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'javauserguide.tex', u'javauserguide Documentation',
u'Patrick Baird', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'javauserguide', u'javauserguide Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'javauserguide', u'javauserguide Documentation',
author, 'javauserguide', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| BiuroCo/mega | bindings/doc/java/sphinx/source/conf.py | Python | bsd-2-clause | 11,589 |
import sys
try:
if sys.version_info[0] == 3:
from tkinter import Tk
from tkinter import messagebox
else:
from Tkinter import Tk
import tkMessageBox as messagebox
_hasTk = 1
except:
_hasTk = 0
def write_CSV(f,x):
"""write list x to file f in comma-separated-value format."""
for e in x:
f.write( repr(e)+',')
f.write('\n')
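# A small usage sketch of write_CSV (the filename is illustrative):
#
#   with open('out.csv', 'w') as f:
#       write_CSV(f, [1, 2.5, 'x'])
#       write_CSV(f, [3, 4.5, 'y'])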
def _print_value(name, value, unitstr):
    # str methods replace the string-module helpers removed in Python 3.
    print(name.rjust(15) +
          ('%10.5e' % value).rjust(15) + ' ' +
          unitstr.ljust(5))
def handleError(message = '<error>', window = None,
fatal = 0, warning = 0, options = None):
# Print the message to the terminal, since this can at least be copied and
# pasted, unlike the contents of the dialog box.
print(message)
if warning:
messagebox.showwarning(title = 'Warning', message = message,
parent = window)
else:
        messagebox.showerror(title = 'Error', message = message,
                             parent = window)
| imitrichev/cantera | interfaces/cython/cantera/mixmaster/utilities.py | Python | bsd-3-clause | 1,126 |
# Copyright (c) 2013 Tencent Inc.
# All rights reserved.
#
# Author: Feng Chen <[email protected]>
"""Define resource_library target
"""
import os
import blade
import build_rules
from cc_targets import CcTarget
class ResourceLibrary(CcTarget):
"""A scons cc target subclass.
    This class is derived from CcTarget and it is the scons class
to generate resource library rules.
"""
def __init__(self,
name,
srcs,
deps,
optimize,
extra_cppflags,
blade,
kwargs):
"""Init method.
Init the cc target.
"""
CcTarget.__init__(self,
name,
'resource_library',
srcs,
deps,
None,
'',
[],
[],
[],
optimize,
extra_cppflags,
[],
blade,
kwargs)
def _generate_header_files(self):
"""Whether this target generates header files during building."""
return True
def scons_rules(self):
"""scons_rules.
It outputs the scons rules according to user options.
"""
self._prepare_to_generate_rule()
self._setup_cc_flags()
env_name = self._env_name()
(out_dir, res_file_index) = self._resource_library_rules_helper()
self.data['res_srcs'] = [os.path.join(out_dir, res_file_index + '.c')]
for src in self.srcs:
src = os.path.normpath(src)
src_path = os.path.join(self.path, src)
c_src_name = '%s.c' % self._regular_variable_name(src)
c_src_path = os.path.join(out_dir, c_src_name)
v_src = self._var_name_of(src_path)
self._write_rule('%s = %s.ResourceFile("%s", "%s")' % (
v_src, env_name, c_src_path, src_path))
self.data['res_srcs'].append(c_src_path)
self._resource_library_rules_objects()
self._cc_library()
def _resource_library_rules_objects(self):
"""Generate resource library object rules. """
env_name = self._env_name()
objs_name = self._objs_name()
objs = []
res_srcs = self.data['res_srcs']
res_objects = {}
path = self.path
for src in res_srcs:
base_src_name = self._regular_variable_name(os.path.basename(src))
src_name = base_src_name + '_' + self.name + '_res'
if src_name not in res_objects:
res_objects[src_name] = (
'%s_%s_object' % (
base_src_name,
self._regular_variable_name(self.name)))
target_path = os.path.join(self.build_path,
path,
'%s.objs' % self.name,
base_src_name)
self._write_rule(
'%s = %s.SharedObject(target="%s" + top_env["OBJSUFFIX"]'
', source="%s")' % (res_objects[src_name],
env_name,
target_path,
src))
objs.append(res_objects[src_name])
self._write_rule('%s = [%s]' % (objs_name, ','.join(objs)))
def _resource_library_rules_helper(self):
"""The helper method to generate scons resource rules, mainly applies builder. """
env_name = self._env_name()
out_dir = os.path.join(self.build_path, self.path)
res_index_name = self._regular_variable_name(self.name)
res_index_source = res_index_name + '.c'
res_index_header = res_index_name + '.h'
src_list = []
for src in self.srcs:
src_path = os.path.join(self.path, src)
src_list.append(src_path)
v_index = self._var_name_of(self.name, 'index')
res_index_header_path = os.path.join(out_dir, res_index_header)
res_index_source_path = os.path.join(out_dir, res_index_source)
self._write_rule('%s["SOURCE_PATH"] = "%s"' % (env_name, self.path))
self._write_rule('%s["TARGET_NAME"] = "%s"' % (env_name, res_index_name))
self._write_rule('%s = %s.ResourceIndex(["%s", "%s"], %s)' % (
v_index, env_name, res_index_source_path, res_index_header_path,
src_list))
return (out_dir, res_index_name)
def resource_library(name,
srcs=[],
deps=[],
optimize=[],
extra_cppflags=[],
**kwargs):
"""scons_resource_library. """
target = ResourceLibrary(name,
srcs,
deps,
optimize,
extra_cppflags,
blade.blade,
kwargs)
blade.blade.register_target(target)
build_rules.register_function(resource_library)
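# A sketch of how this rule might appear in a BUILD file (the target and file
# names are illustrative):
#
# resource_library(
#     name = 'static_resource',
#     srcs = [
#         'favicon.ico',
#         'forms.html',
#     ],
# )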
| project-zerus/blade | src/blade/resource_library_target.py | Python | bsd-3-clause | 5,386 |
# -*- coding: utf-8 -*-
"""Module containing classes with common behaviour for both VMs and Instances of all types."""
from datetime import date
from functools import partial
from wrapanapi import exceptions
from cfme import js
from cfme.common.vm_console import VMConsole
from cfme.exceptions import (
VmOrInstanceNotFound, ItemNotFound, OptionNotAvailable, UnknownProviderType)
from cfme.fixtures import pytest_selenium as sel
from cfme.web_ui import (
AngularCalendarInput, AngularSelect, Form, InfoBlock, Input, Select, fill, flash,
form_buttons, toolbar, PagedTable, CheckboxTable,
DriftGrid, BootstrapTreeview
)
import cfme.web_ui.toolbar as tb
from cfme.common import WidgetasticTaggable
from cfme.utils import version, ParamClassName
from cfme.utils.appliance import Navigatable
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from cfme.utils.timeutil import parsetime
from cfme.utils.update import Updateable
from cfme.utils.virtual_machines import deploy_template
from cfme.utils.wait import wait_for, TimedOutError
from . import PolicyProfileAssignable, SummaryMixin
access_btn = partial(toolbar.select, "Access")
cfg_btn = partial(toolbar.select, "Configuration")
lcl_btn = partial(toolbar.select, "Lifecycle")
mon_btn = partial(toolbar.select, 'Monitoring')
pol_btn = partial(toolbar.select, "Policy")
pwr_btn = partial(toolbar.select, "Power")
retire_remove_button = "//span[@id='remove_button']/a/img|//a/img[contains(@src, '/clear')]"
set_ownership_form = Form(fields=[
('user_name', AngularSelect('user_name')),
('group_name', AngularSelect('group_name')),
('create_button', form_buttons.save),
('reset_button', form_buttons.reset),
('cancel_button', form_buttons.cancel)
])
drift_table = CheckboxTable("//th[normalize-space(.)='Timestamp']/ancestor::table[1]")
drift_section = BootstrapTreeview('all_sectionsbox')
def base_types(template=False):
from pkg_resources import iter_entry_points
search = "template" if template else "vm"
return {
ep.name: ep.resolve() for ep in iter_entry_points('manageiq.{}_categories'.format(search))
}
def instance_types(category, template=False):
from pkg_resources import iter_entry_points
search = "template" if template else "vm"
return {
ep.name: ep.resolve() for ep in iter_entry_points(
'manageiq.{}_types.{}'.format(search, category))
}
def all_types(template=False):
all_types = base_types(template)
for category in all_types.keys():
all_types.update(instance_types(category, template))
return all_types
class _TemplateMixin(object):
pass
class BaseVM(Pretty, Updateable, PolicyProfileAssignable, WidgetasticTaggable,
SummaryMixin, Navigatable):
"""Base VM and Template class that holds the largest common functionality between VMs,
instances, templates and images.
In order to inherit these, you have to implement the ``on_details`` method.
"""
pretty_attrs = ['name', 'provider', 'template_name']
# Forms
edit_form = Form(
fields=[
('custom_ident', Input("custom_1")),
('description_tarea', "//textarea[@id='description']"),
('parent_sel', {
version.LOWEST: Select("//select[@name='chosen_parent']"),
"5.5": AngularSelect("chosen_parent")}),
('child_sel', Select("//select[@id='kids_chosen']", multi=True)),
('vm_sel', Select("//select[@id='choices_chosen']", multi=True)),
('add_btn', "//img[@alt='Move selected VMs to left']"),
('remove_btn', "//img[@alt='Move selected VMs to right']"),
('remove_all_btn', "//img[@alt='Move all VMs to right']"),
])
###
# Factory class methods
#
@classmethod
def factory(cls, vm_name, provider, template_name=None, template=False):
"""Factory class method that determines the correct subclass for given provider.
For reference how does that work, refer to the entrypoints in the setup.py
Args:
vm_name: Name of the VM/Instance as it appears in the UI
provider: The provider object (not the string!)
template_name: Source template name. Useful when the VM/Instance does not exist and you
want to create it.
template: Whether the generated object class should be VM/Instance or a template class.
"""
try:
return all_types(template)[provider.type](vm_name, provider, template_name)
except KeyError:
# Matching via provider type failed. Maybe we have some generic classes for infra/cloud?
try:
return all_types(template)[provider.category](vm_name, provider, template_name)
except KeyError:
raise UnknownProviderType(
'Unknown type of provider CRUD object: {}'
.format(provider.__class__.__name__))
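    # A short usage sketch (names are illustrative); the concrete subclass is
    # resolved from the provider type via the setup.py entrypoints:
    #
    #   vm = VM.factory('test-vm-01', provider)
    #   image = VM.factory('rhel7-image', provider, template=True)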
###
# To be set or implemented
#
ALL_LIST_LOCATION = None
TO_OPEN_EDIT = None # Name of the item in Configuration that puts you in the form
QUADICON_TYPE = "vm"
# Titles of the delete buttons in configuration
REMOVE_SELECTED = {'5.6': 'Remove selected items',
'5.6.2.2': 'Remove selected items from the VMDB',
'5.7': 'Remove selected items'}
REMOVE_SINGLE = {'5.6': 'Remove Virtual Machine',
'5.6.2.2': 'Remove from the VMDB',
'5.7': 'Remove Virtual Machine'}
RETIRE_DATE_FMT = {version.LOWEST: parsetime.american_date_only_format,
'5.7': parsetime.american_minutes_with_utc,
'5.9': parsetime.saved_report_title_format}
_param_name = ParamClassName('name')
###
# Shared behaviour
#
def __init__(self, name, provider, template_name=None, appliance=None):
super(BaseVM, self).__init__()
Navigatable.__init__(self, appliance=appliance)
if type(self) in {BaseVM, VM, Template}:
raise NotImplementedError('This class cannot be instantiated.')
self.name = name
self.provider = provider
self.template_name = template_name
###
# Properties
#
@property
def is_vm(self):
return not isinstance(self, _TemplateMixin)
@property
def quadicon_type(self):
return self.QUADICON_TYPE
@property
def paged_table(self):
return PagedTable('//table')
###
# Methods
#
def check_compliance(self, timeout=240):
"""Initiates compliance check and waits for it to finish.
TODO This should be refactored as it's done `Host.check_compliance`. It shouldn't return
anything. `compliant` property should use `compliance_status`.
"""
original_state = self.compliance_status
cfg_btn("Refresh Relationships and Power States", invokes_alert=True)
sel.handle_alert()
flash.assert_no_errors()
pol_btn("Check Compliance of Last Known Configuration", invokes_alert=True)
sel.handle_alert()
flash.assert_no_errors()
wait_for(
lambda: self.compliance_status != original_state,
num_sec=timeout, delay=5, message="compliance of {} checked".format(self.name)
)
return self.compliant
@property
def compliance_status(self):
"""Returns the title of the compliance infoblock. The title contains datetime so it can be
compared.
Returns:
:py:class:`NoneType` if no title is present (no compliance checks before), otherwise str
"""
self.load_details(refresh=True)
return InfoBlock("Compliance", "Status").title
@property
def compliant(self):
"""Check if the VM is compliant
Returns:
:py:class:`NoneType` if the VM was never verified, otherwise :py:class:`bool`
"""
text = self.get_detail(properties=("Compliance", "Status")).strip().lower()
if text == "never verified":
return None
elif text.startswith("non-compliant"):
return False
elif text.startswith("compliant"):
return True
else:
raise ValueError("{} is not a known state for compliance".format(text))
@property
def console_handle(self):
'''
        The basic algorithm for getting the console's window handle is to get the
        appliance's window handle and then iterate through the window handles until
        we find one that is not the appliance's. Once we find it, check that it has
        a canvas widget with a specific ID.
'''
browser = self.appliance.browser.widgetastic
appliance_handle = browser.window_handle
cur_handles = browser.selenium.window_handles
logger.info("Current Window Handles: {}".format(cur_handles))
for handle in cur_handles:
if handle != appliance_handle:
# FIXME: Add code to verify the tab has the correct widget
# for a console tab.
return handle
@property
def vm_console(self):
"""Get the consoles window handle, and then create a VMConsole object, and store
the VMConsole object aside.
"""
console_handle = self.console_handle
if console_handle is None:
raise TypeError("Console handle should not be None")
appliance_handle = self.appliance.browser.widgetastic.window_handle
logger.info("Creating VMConsole:")
logger.info(" appliance_handle: {}".format(appliance_handle))
logger.info(" console_handle: {}".format(console_handle))
logger.info(" name: {}".format(self.name))
return VMConsole(appliance_handle=appliance_handle,
console_handle=console_handle,
vm=self)
def delete(self, cancel=False, from_details=False):
"""Deletes the VM/Instance from the VMDB.
Args:
cancel: Whether to cancel the action in the alert.
from_details: Whether to use the details view or list view.
"""
if from_details:
self.load_details(refresh=True)
cfg_btn(self.REMOVE_SINGLE, invokes_alert=True)
else:
self.find_quadicon().check()
cfg_btn(self.REMOVE_SELECTED, invokes_alert=True)
sel.handle_alert(cancel=cancel)
@property
def exists(self):
"""Checks presence of the quadicon in the CFME."""
try:
self.find_quadicon()
return True
except VmOrInstanceNotFound:
return False
@property
def ip_address(self):
"""Fetches IP Address of VM"""
return self.provider.mgmt.get_ip_address(self.name)
@property
def is_retired(self):
""""Check retirement status of vm"""
self.summary.reload()
if self.summary.lifecycle.retirement_date.text_value.lower() != 'never':
try:
return self.summary.lifecycle.retirement_state.text_value.lower() == 'retired'
except AttributeError:
return False
else:
return False
def find_quadicon(self, from_any_provider=False, use_search=True):
"""Find and return a quadicon belonging to a specific vm
Args:
from_any_provider: Whether to look for it anywhere (root of the tree). Useful when
looking up archived or orphaned VMs
Returns: entity of appropriate type
Raises: VmOrInstanceNotFound
"""
# todo :refactor this method replace it with vm methods like get_state
if from_any_provider:
view = navigate_to(self, 'All')
else:
view = navigate_to(self, 'AllForProvider', use_resetter=False)
if 'Grid View' != view.toolbar.view_selector.selected:
view.toolbar.view_selector.select('Grid View')
try:
return view.entities.get_entity(name=self.name, surf_pages=True, use_search=use_search)
except ItemNotFound:
raise VmOrInstanceNotFound("VM '{}' not found in UI!".format(self.name))
def get_detail(self, properties=None, icon_href=False):
"""Gets details from the details infoblock
The function first ensures that we are on the detail page for the specific VM/Instance.
Args:
properties: An InfoBlock title, followed by the Key name, e.g. "Relationships", "Images"
Returns:
A string representing the contents of the InfoBlock's value.
"""
self.load_details(refresh=True)
if icon_href:
return InfoBlock.icon_href(*properties)
else:
return InfoBlock.text(*properties)
def open_console(self, console='VM Console', invokes_alert=False, cancel=False):
"""
Initiates the opening of one of the console types supported by the Access
button. Presently we only support VM Console, which is the HTML5 Console.
In case of VMware provider it could be VMRC, VNC/HTML5, WebMKS, but we only
support VNC/HTML5.
Possible values for 'console' could be 'VM Console' and 'Web Console', but Web
Console is not supported as well.
Args:
console: one of the supported console types given by the Access button.
invokes_alert: If the particular console will invoke a CFME popup/alert
setting this to true will handle this.
cancel: Allows one to cancel the operation if the popup/alert occurs.
"""
# TODO: implement vmrc vm console
if console not in ['VM Console']:
raise NotImplementedError('Not supported console type: {}'.format(console))
view = navigate_to(self, 'Details')
# Click console button given by type
        view.toolbar.access.item_select(
            console, handle_alert=None if invokes_alert is False else True)
self.vm_console
def open_details(self, properties=None):
"""Clicks on details infoblock"""
self.load_details(refresh=True)
sel.click(InfoBlock(*properties))
@classmethod
def get_first_vm(cls, provider):
"""Get first VM/Instance."""
# todo: move this to base provider ?
view = navigate_to(cls, 'AllForProvider', provider=provider)
return view.entities.get_first_entity()
@property
def last_analysed(self):
"""Returns the contents of the ``Last Analysed`` field in summary"""
return self.get_detail(properties=('Lifecycle', 'Last Analyzed')).strip()
def load_details(self, refresh=False, from_any_provider=False):
"""Navigates to an VM's details page.
Args:
refresh: Refreshes the VM page if already there
from_any_provider: Archived/Orphaned VMs need this
Raises:
VmOrInstanceNotFound:
When unable to find the VM passed
"""
if from_any_provider:
navigate_to(self, 'AnyProviderDetails', use_resetter=False)
else:
navigate_to(self, 'Details', use_resetter=False)
if refresh:
toolbar.refresh()
self.browser.plugin.ensure_page_safe()
def open_edit(self):
"""Loads up the edit page of the object."""
self.load_details(refresh=True)
cfg_btn(self.TO_OPEN_EDIT)
def open_timelines(self):
"""Navigates to an VM's timeline page.
Returns:
:py:class:`TimelinesView` object
"""
return navigate_to(self, 'Timelines')
def rediscover(self):
"""Deletes the VM from the provider and lets it discover again"""
self.delete(from_details=True)
self.wait_for_delete()
self.provider.refresh_provider_relationships()
self.wait_to_appear()
def rediscover_if_analysis_data_present(self):
"""Rediscovers the object if it has some analysis data present.
Returns:
Boolean if the rediscovery happened.
"""
if self.last_analysed.lower() != 'never':
self.rediscover()
return True
return False
def refresh_relationships(self, from_details=False, cancel=False, from_any_provider=False):
"""Executes a refresh of relationships.
Args:
from_details: Whether or not to perform action from instance details page
cancel: Whether or not to cancel the refresh relationships action
"""
if from_details:
self.load_details()
else:
self.find_quadicon(from_any_provider=from_any_provider).check()
cfg_btn('Refresh Relationships and Power States', invokes_alert=True)
sel.handle_alert(cancel=cancel)
@property
def retirement_date(self):
"""Returns the retirement date of the selected machine, or 'Never'
Returns:
:py:class:`str` object
"""
return self.get_detail(properties=("Lifecycle", "Retirement Date")).strip()
def smartstate_scan(self, cancel=False, from_details=False):
"""Initiates fleecing from the UI.
Args:
cancel: Whether or not to cancel the refresh relationships action
from_details: Whether or not to perform action from instance details page
"""
if from_details:
self.load_details(refresh=True)
else:
self.find_quadicon().check()
cfg_btn('Perform SmartState Analysis', invokes_alert=True)
sel.handle_alert(cancel=cancel)
def wait_to_disappear(self, timeout=600, load_details=True):
"""Wait for a VM to disappear within CFME
Args:
timeout: time (in seconds) to wait for it to appear
"""
wait_for(
lambda: self.exists,
num_sec=timeout, delay=30, fail_func=sel.refresh, fail_condition=True,
message="wait for vm to not exist")
wait_for_delete = wait_to_disappear # An alias for more fitting verbosity
def wait_to_appear(self, timeout=600, load_details=True):
"""Wait for a VM to appear within CFME
Args:
timeout: time (in seconds) to wait for it to appear
load_details: when found, should it load the vm details
"""
def _refresh():
self.provider.refresh_provider_relationships()
self.appliance.browser.widgetastic.browser.refresh() # strange because ViaUI
wait_for(
lambda: self.exists,
num_sec=timeout, delay=5, fail_func=_refresh,
message="wait for vm to appear")
if load_details:
self.load_details()
def set_ownership(self, user=None, group=None, click_cancel=False, click_reset=False):
"""Set ownership of the VM/Instance or Template/Image"""
self.find_quadicon(use_search=False).click()
cfg_btn('Set Ownership')
if click_reset:
action = form_buttons.reset
msg_assert = partial(
flash.assert_message_match,
'All changes have been reset'
)
elif click_cancel:
action = form_buttons.cancel
msg_assert = partial(
flash.assert_success_message,
'Set Ownership was cancelled by the user'
)
else:
action = form_buttons.save
msg_assert = partial(
flash.assert_success_message,
'Ownership saved for selected {}'.format(self.VM_TYPE)
)
fill(set_ownership_form, {'user_name': user, 'group_name': group},
action=action)
msg_assert()
def unset_ownership(self):
"""Unset ownership of the VM/Instance or Template/Image"""
# choose the vm code comes here
self.find_quadicon(use_search=False).click()
cfg_btn('Set Ownership')
fill(set_ownership_form, {'user_name': '<No Owner>',
'group_name': 'EvmGroup-administrator'},
action=form_buttons.save)
flash.assert_success_message('Ownership saved for selected {}'.format(self.VM_TYPE))
def date_retire_element(fill_data):
"""We need to call this function that will mimic clicking the calendar, picking the date and
the subsequent callbacks from the server"""
# TODO: Move the code in the Calendar itself? I did not check other calendars
if isinstance(fill_data, date):
date_str = '{}/{}/{}'.format(fill_data.month, fill_data.day, fill_data.year)
else:
date_str = str(fill_data)
sel.execute_script(
js.update_retirement_date_function_script +
"updateDate(arguments[0]);",
date_str
)
class VM(BaseVM):
TO_RETIRE = None
retire_form_click_away = "//label[contains(normalize-space(.), 'Retirement Date')]"
retire_form = Form(fields=[
('date_retire',
{version.LOWEST: AngularCalendarInput("retirement_date", retire_form_click_away),
'5.9': AngularCalendarInput("retirement_date_datepicker", retire_form_click_away)}),
('warn', AngularSelect('retirementWarning'))
])
def retire(self):
self.load_details(refresh=True)
lcl_btn(self.TO_RETIRE, invokes_alert=True)
sel.handle_alert()
flash.assert_success_message(
'Retirement initiated for 1 VM and Instance from the {} Database'.format(version.pick({
version.LOWEST: 'CFME',
'upstream': 'ManageIQ'})))
def power_control_from_provider(self):
raise NotImplementedError("You have to implement power_control_from_provider!")
def power_control_from_cfme(self, option, cancel=True, from_details=False):
"""Power controls a VM from within CFME
Args:
option: corresponds to option values under the power button
cancel: Whether or not to cancel the power operation on confirmation
from_details: Whether or not to perform action from instance details page
Raises:
OptionNotAvailable: option param is not visible or enabled
"""
if (self.is_pwr_option_available_in_cfme(option=option, from_details=from_details)):
pwr_btn(option, invokes_alert=True)
sel.handle_alert(cancel=cancel, check_present=True)
logger.info(
"Power control action of VM/instance %s, option %s, cancel %s executed",
self.name, option, str(cancel))
else:
raise OptionNotAvailable(option + " is not visible or enabled")
def wait_candu_data_available(self, timeout=600):
"""Waits until C&U data are available for this VM/Instance
Args:
timeout: Timeout passed to :py:func:`utils.wait.wait_for`
"""
self.load_details(refresh=True)
wait_for(
lambda: not toolbar.is_greyed('Monitoring', 'Utilization'),
delay=10, handle_exception=True, num_sec=timeout,
fail_func=lambda: toolbar.refresh())
def wait_for_vm_state_change(self, desired_state=None, timeout=300, from_details=False,
with_relationship_refresh=True, from_any_provider=False):
"""Wait for VM to come to desired state.
This function waits just the needed amount of time thanks to wait_for.
Args:
desired_state: on, off, suspended... for available states, see
:py:class:`EC2Instance` and :py:class:`OpenStackInstance`
timeout: Specify amount of time (in seconds) to wait
from_any_provider: Archived/Orphaned vms need this
Raises:
TimedOutError:
When instance does not come up to desired state in specified period of time.
InstanceNotFound:
When unable to find the instance passed
"""
detail_t = ("Power Management", "Power State")
def _looking_for_state_change():
if from_details:
self.load_details(refresh=True)
return self.get_detail(properties=detail_t) == desired_state
else:
return 'currentstate-' + desired_state in self.find_quadicon(
from_any_provider=from_any_provider).data['state']
return wait_for(
_looking_for_state_change,
num_sec=timeout,
delay=30,
fail_func=lambda: self.refresh_relationships(from_details=from_details,
from_any_provider=from_any_provider) if
with_relationship_refresh else None)
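    # A short usage sketch (state and option names vary per provider type; see
    # EC2Instance / OpenStackInstance for the available constants):
    #
    #   vm.power_control_from_cfme(option='Power Off', cancel=False)
    #   vm.wait_for_vm_state_change(desired_state='off', timeout=600)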
def is_pwr_option_available_in_cfme(self, option, from_details=False):
"""Checks to see if a power option is available on the VM
Args:
option: corresponds to option values under the power button,
see :py:class:`EC2Instance` and :py:class:`OpenStackInstance`
from_details: Whether or not to perform action from instance details page
"""
if from_details:
self.load_details(refresh=True)
else:
entity = self.find_quadicon()
entity.check()
try:
return not toolbar.is_greyed('Power', option)
except sel.NoSuchElementException:
return False
def delete_from_provider(self):
logger.info("Begin delete_from_provider")
if self.provider.mgmt.does_vm_exist(self.name):
try:
if self.provider.mgmt.is_vm_suspended(self.name) and self.provider.type != 'azure':
logger.debug("Powering up VM %s to shut it down correctly on %s.",
self.name, self.provider.key)
self.provider.mgmt.start_vm(self.name)
self.provider.mgmt.wait_vm_steady(self.name)
self.provider.mgmt.stop_vm(self.name)
self.provider.mgmt.wait_vm_steady(self.name)
except exceptions.ActionNotSupported:
# Action is not supported on mgmt system. Simply continue
pass
# One more check (for the suspended one)
if self.provider.mgmt.does_vm_exist(self.name):
try:
logger.info("Mgmt System delete_vm")
return self.provider.mgmt.delete_vm(self.name)
except exceptions.VMInstanceNotFound:
# Does not exist already
return True
else:
return True
def create_on_provider(self, timeout=900, find_in_cfme=False, **kwargs):
"""Create the VM on the provider
Args:
timeout: Number of seconds to wait for the VM to appear in CFME
Will not wait at all, if set to 0 (Defaults to ``900``)
"""
deploy_template(self.provider.key, self.name, self.template_name, **kwargs)
if find_in_cfme:
self.provider.refresh_provider_relationships()
self.wait_to_appear(timeout=timeout, load_details=False)
def does_vm_exist_on_provider(self):
"""Check if VM exists on provider itself"""
return self.provider.mgmt.does_vm_exist(self.name)
def set_retirement_date(self, when, warn=None):
"""Sets the retirement date for this Vm object.
It incorporates some magic to make it work reliably since the retirement form is not very
pretty and it can't be just "done".
Args:
            when: When to retire. :py:class:`str` in format mm/dd/yyyy or
:py:class:`datetime.datetime` or :py:class:`utils.timeutil.parsetime`.
warn: When to warn, fills the select in the form in case the ``when`` is specified.
"""
# TODO: refactor for retirement nav destinations and widget form fill when child classes
self.load_details()
lcl_btn("Set Retirement Date")
if callable(self.retire_form.date_retire):
            # It is the old function
sel.wait_for_element("#miq_date_1")
else:
sel.wait_for_element(self.retire_form.date_retire)
if when is None:
try:
wait_for(lambda: sel.is_displayed(retire_remove_button), num_sec=5, delay=0.2)
sel.click(retire_remove_button)
wait_for(lambda: not sel.is_displayed(retire_remove_button), num_sec=10, delay=0.2)
sel.click(form_buttons.save)
except TimedOutError:
pass
else:
if sel.is_displayed(retire_remove_button):
sel.click(retire_remove_button)
wait_for(lambda: not sel.is_displayed(retire_remove_button), num_sec=15, delay=0.2)
fill(self.retire_form.date_retire, when)
wait_for(lambda: sel.is_displayed(retire_remove_button), num_sec=15, delay=0.2)
if warn is not None:
fill(self.retire_form.warn, warn)
sel.click(form_buttons.save)
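    # Example calls (a sketch; the date and warn values are made up). Either a
    # plain string in the documented mm/dd/yyyy format or a datetime works:
    #     vm.set_retirement_date('12/31/2020', warn='1 Week before retirement')
    #     vm.set_retirement_date(datetime.datetime(2020, 12, 31))
    # Passing ``when=None`` clears any previously set retirement date.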
def equal_drift_results(self, row_text, section, *indexes):
""" Compares drift analysis results of a row specified by it's title text
Args:
row_text: Title text of the row to compare
section: Accordion section where the change happened; this section will be activated
indexes: Indexes of results to compare starting with 0 for first row (latest result).
Compares all available drifts, if left empty (default).
Note:
There have to be at least 2 drift results available for this to work.
Returns:
``True`` if equal, ``False`` otherwise.
"""
# mark by indexes or mark all
self.load_details(refresh=True)
sel.click(InfoBlock("Properties", "Drift History"))
        if indexes:
            drift_table.select_rows_by_indexes(*indexes)
            num_results = len(indexes)
        else:
            # We can't compare more than 10 drift results at once
            # so when selecting all, we have to limit it to the latest 10
            num_results = min(10, len(list(drift_table.rows())))
            if num_results == 10:
                drift_table.select_rows_by_indexes(*range(0, 10))
            else:
                drift_table.select_all()
tb.select("Select up to 10 timestamps for Drift Analysis")
# Make sure the section we need is active/open
sec_apply_btn = "//div[@id='accordion']/a[contains(normalize-space(text()), 'Apply')]"
# Deselect other sections
for other_section in drift_section.child_items():
drift_section.check_node(other_section.text)
drift_section.uncheck_node(other_section.text)
# Activate the required section
drift_section.check_node(section)
sel.click(sec_apply_btn)
if not tb.is_active("All attributes"):
tb.select("All attributes")
drift_grid = DriftGrid()
        if any(drift_grid.cell_indicates_change(row_text, i)
               for i in range(num_results)):
return False
return True
class Template(BaseVM, _TemplateMixin):
"""A base class for all templates. The constructor is a bit different, it scraps template_name.
"""
def __init__(self, name, provider, template_name=None):
# template_name gets ignored because template does not have a template, logically.
super(Template, self).__init__(name, provider, template_name=None)
def does_vm_exist_on_provider(self):
"""Check if template exists on provider itself"""
return self.provider.mgmt.does_template_exist(self.name)
# For more logical writing of conditions.
does_template_exist_on_provider = does_vm_exist_on_provider
| jkandasa/integration_tests | cfme/common/vm.py | Python | gpl-2.0 | 32,052 |
import ctypes
import numpy
import sys
import os
import os.path
from numpy.compat import asbytes, asstr
def _generate_candidate_libs():
# look for likely library files in the following dirs:
lib_dirs = [os.path.dirname(__file__),
'/lib',
'/usr/lib',
'/usr/local/lib',
'/opt/local/lib',
os.path.join(sys.prefix, 'lib'),
os.path.join(sys.prefix, 'DLLs')
]
if 'HOME' in os.environ:
lib_dirs.append(os.path.join(os.environ['HOME'], 'lib'))
lib_dirs = [ld for ld in lib_dirs if os.path.exists(ld)]
lib_names = ['libfreeimage', 'freeimage'] # should be lower-case!
# Now attempt to find libraries of that name in the given directory
# (case-insensitive and without regard for extension)
lib_paths = []
for lib_dir in lib_dirs:
for lib_name in lib_names:
files = os.listdir(lib_dir)
lib_paths += [os.path.join(lib_dir, lib) for lib in files
if lib.lower().startswith(lib_name) and not
os.path.splitext(lib)[1] in ('.py', '.pyc', '.ini')]
lib_paths = [lp for lp in lib_paths if os.path.exists(lp)]
return lib_dirs, lib_paths
def load_freeimage():
if sys.platform == 'win32':
loader = ctypes.windll
functype = ctypes.WINFUNCTYPE
else:
loader = ctypes.cdll
functype = ctypes.CFUNCTYPE
freeimage = None
errors = []
# First try a few bare library names that ctypes might be able to find
# in the default locations for each platform. Win DLL names don't need the
# extension, but other platforms do.
bare_libs = ['FreeImage', 'libfreeimage.dylib', 'libfreeimage.so',
'libfreeimage.so.3']
lib_dirs, lib_paths = _generate_candidate_libs()
lib_paths = bare_libs + lib_paths
for lib in lib_paths:
try:
freeimage = loader.LoadLibrary(lib)
break
except Exception:
if lib not in bare_libs:
# Don't record errors when it couldn't load the library from
# a bare name -- this fails often, and doesn't provide any
# useful debugging information anyway, beyond "couldn't find
# library..."
# Get exception instance in Python 2.x/3.x compatible manner
e_type, e_value, e_tb = sys.exc_info()
del e_tb
errors.append((lib, e_value))
if freeimage is None:
if errors:
# No freeimage library loaded, and load-errors reported for some
# candidate libs
            err_txt = ['%s:\n%s' % (l, str(e)) for l, e in errors]
            raise RuntimeError('One or more FreeImage libraries were found, '
                               'but could not be loaded due to the following '
                               'errors:\n' + '\n\n'.join(err_txt))
else:
# No errors, because no potential libraries found at all!
raise RuntimeError('Could not find a FreeImage library in any of:\n' +
'\n'.join(lib_dirs))
# FreeImage found
@functype(None, ctypes.c_int, ctypes.c_char_p)
def error_handler(fif, message):
raise RuntimeError('FreeImage error: %s' % message)
freeimage.FreeImage_SetOutputMessage(error_handler)
return freeimage
_FI = load_freeimage()
API = {
# All we're doing here is telling ctypes that some of the FreeImage
# functions return pointers instead of integers. (On 64-bit systems,
# without this information the pointers get truncated and crashes result).
# There's no need to list functions that return ints, or the types of the
# parameters to these or other functions -- that's fine to do implicitly.
    # Note that ctypes immediately converts the returned void_p back to a
# python int again! This is really not helpful, because then passing it
# back to another library call will cause truncation-to-32-bits on 64-bit
# systems. Thanks, ctypes! So after these calls one must immediately
# re-wrap the int as a c_void_p if it is to be passed back into FreeImage.
'FreeImage_AllocateT': (ctypes.c_void_p, None),
'FreeImage_FindFirstMetadata': (ctypes.c_void_p, None),
'FreeImage_GetBits': (ctypes.c_void_p, None),
'FreeImage_GetPalette': (ctypes.c_void_p, None),
'FreeImage_GetTagKey': (ctypes.c_char_p, None),
'FreeImage_GetTagValue': (ctypes.c_void_p, None),
'FreeImage_Load': (ctypes.c_void_p, None),
'FreeImage_LockPage': (ctypes.c_void_p, None),
'FreeImage_OpenMultiBitmap': (ctypes.c_void_p, None)
}
# Albert's ctypes pattern
def register_api(lib, api):
for f, (restype, argtypes) in api.items():
func = getattr(lib, f)
func.restype = restype
func.argtypes = argtypes
register_api(_FI, API)
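# A minimal sketch of the re-wrapping pattern described above (the file name is
# hypothetical): the plain int that ctypes returns for a pointer-returning call
# must be wrapped in c_void_p again before being handed back to FreeImage.
#     handle = _FI.FreeImage_Load(ftype, b'image.png', 0)   # plain Python int
#     bitmap = ctypes.c_void_p(handle)                      # safe to pass back
#     _FI.FreeImage_Unload(bitmap)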
class FI_TYPES(object):
FIT_UNKNOWN = 0
FIT_BITMAP = 1
FIT_UINT16 = 2
FIT_INT16 = 3
FIT_UINT32 = 4
FIT_INT32 = 5
FIT_FLOAT = 6
FIT_DOUBLE = 7
FIT_COMPLEX = 8
FIT_RGB16 = 9
FIT_RGBA16 = 10
FIT_RGBF = 11
FIT_RGBAF = 12
dtypes = {
FIT_BITMAP: numpy.uint8,
FIT_UINT16: numpy.uint16,
FIT_INT16: numpy.int16,
FIT_UINT32: numpy.uint32,
FIT_INT32: numpy.int32,
FIT_FLOAT: numpy.float32,
FIT_DOUBLE: numpy.float64,
FIT_COMPLEX: numpy.complex128,
FIT_RGB16: numpy.uint16,
FIT_RGBA16: numpy.uint16,
FIT_RGBF: numpy.float32,
FIT_RGBAF: numpy.float32
}
fi_types = {
(numpy.dtype('uint8'), 1): FIT_BITMAP,
(numpy.dtype('uint8'), 3): FIT_BITMAP,
(numpy.dtype('uint8'), 4): FIT_BITMAP,
(numpy.dtype('uint16'), 1): FIT_UINT16,
(numpy.dtype('int16'), 1): FIT_INT16,
(numpy.dtype('uint32'), 1): FIT_UINT32,
(numpy.dtype('int32'), 1): FIT_INT32,
(numpy.dtype('float32'), 1): FIT_FLOAT,
(numpy.dtype('float64'), 1): FIT_DOUBLE,
(numpy.dtype('complex128'), 1): FIT_COMPLEX,
(numpy.dtype('uint16'), 3): FIT_RGB16,
(numpy.dtype('uint16'), 4): FIT_RGBA16,
(numpy.dtype('float32'), 3): FIT_RGBF,
(numpy.dtype('float32'), 4): FIT_RGBAF
}
extra_dims = {
FIT_UINT16: [],
FIT_INT16: [],
FIT_UINT32: [],
FIT_INT32: [],
FIT_FLOAT: [],
FIT_DOUBLE: [],
FIT_COMPLEX: [],
FIT_RGB16: [3],
FIT_RGBA16: [4],
FIT_RGBF: [3],
FIT_RGBAF: [4]
}
@classmethod
def get_type_and_shape(cls, bitmap):
w = _FI.FreeImage_GetWidth(bitmap)
h = _FI.FreeImage_GetHeight(bitmap)
fi_type = _FI.FreeImage_GetImageType(bitmap)
if not fi_type:
raise ValueError('Unknown image pixel type')
dtype = cls.dtypes[fi_type]
if fi_type == cls.FIT_BITMAP:
bpp = _FI.FreeImage_GetBPP(bitmap)
if bpp == 8:
extra_dims = []
elif bpp == 24:
extra_dims = [3]
elif bpp == 32:
extra_dims = [4]
else:
raise ValueError('Cannot convert %d BPP bitmap' % bpp)
else:
extra_dims = cls.extra_dims[fi_type]
return numpy.dtype(dtype), extra_dims + [w, h]
class IO_FLAGS(object):
FIF_LOAD_NOPIXELS = 0x8000 # loading: load the image header only
# (not supported by all plugins)
BMP_DEFAULT = 0
BMP_SAVE_RLE = 1
CUT_DEFAULT = 0
DDS_DEFAULT = 0
EXR_DEFAULT = 0 # save data as half with piz-based wavelet compression
EXR_FLOAT = 0x0001 # save data as float instead of as half (not recommended)
EXR_NONE = 0x0002 # save with no compression
EXR_ZIP = 0x0004 # save with zlib compression, in blocks of 16 scan lines
EXR_PIZ = 0x0008 # save with piz-based wavelet compression
EXR_PXR24 = 0x0010 # save with lossy 24-bit float compression
EXR_B44 = 0x0020 # save with lossy 44% float compression
# - goes to 22% when combined with EXR_LC
EXR_LC = 0x0040 # save images with one luminance and two chroma channels,
# rather than as RGB (lossy compression)
FAXG3_DEFAULT = 0
GIF_DEFAULT = 0
    GIF_LOAD256 = 1 # Load the image as a 256 color image with unused
# palette entries, if it's 16 or 2 color
GIF_PLAYBACK = 2 # 'Play' the GIF to generate each frame (as 32bpp)
# instead of returning raw frame data when loading
HDR_DEFAULT = 0
ICO_DEFAULT = 0
ICO_MAKEALPHA = 1 # convert to 32bpp and create an alpha channel from the
# AND-mask when loading
IFF_DEFAULT = 0
J2K_DEFAULT = 0 # save with a 16:1 rate
JP2_DEFAULT = 0 # save with a 16:1 rate
JPEG_DEFAULT = 0 # loading (see JPEG_FAST);
# saving (see JPEG_QUALITYGOOD|JPEG_SUBSAMPLING_420)
JPEG_FAST = 0x0001 # load the file as fast as possible,
# sacrificing some quality
JPEG_ACCURATE = 0x0002 # load the file with the best quality,
# sacrificing some speed
JPEG_CMYK = 0x0004 # load separated CMYK "as is"
# (use | to combine with other load flags)
JPEG_EXIFROTATE = 0x0008 # load and rotate according to
# Exif 'Orientation' tag if available
JPEG_QUALITYSUPERB = 0x80 # save with superb quality (100:1)
JPEG_QUALITYGOOD = 0x0100 # save with good quality (75:1)
JPEG_QUALITYNORMAL = 0x0200 # save with normal quality (50:1)
JPEG_QUALITYAVERAGE = 0x0400 # save with average quality (25:1)
JPEG_QUALITYBAD = 0x0800 # save with bad quality (10:1)
JPEG_PROGRESSIVE = 0x2000 # save as a progressive-JPEG
# (use | to combine with other save flags)
JPEG_SUBSAMPLING_411 = 0x1000 # save with high 4x1 chroma
# subsampling (4:1:1)
JPEG_SUBSAMPLING_420 = 0x4000 # save with medium 2x2 medium chroma
# subsampling (4:2:0) - default value
JPEG_SUBSAMPLING_422 = 0x8000 # save with low 2x1 chroma subsampling (4:2:2)
JPEG_SUBSAMPLING_444 = 0x10000 # save with no chroma subsampling (4:4:4)
JPEG_OPTIMIZE = 0x20000 # on saving, compute optimal Huffman coding tables
# (can reduce a few percent of file size)
JPEG_BASELINE = 0x40000 # save basic JPEG, without metadata or any markers
KOALA_DEFAULT = 0
LBM_DEFAULT = 0
MNG_DEFAULT = 0
PCD_DEFAULT = 0
PCD_BASE = 1 # load the bitmap sized 768 x 512
PCD_BASEDIV4 = 2 # load the bitmap sized 384 x 256
PCD_BASEDIV16 = 3 # load the bitmap sized 192 x 128
PCX_DEFAULT = 0
PFM_DEFAULT = 0
PICT_DEFAULT = 0
PNG_DEFAULT = 0
PNG_IGNOREGAMMA = 1 # loading: avoid gamma correction
PNG_Z_BEST_SPEED = 0x0001 # save using ZLib level 1 compression flag
# (default value is 6)
PNG_Z_DEFAULT_COMPRESSION = 0x0006 # save using ZLib level 6 compression
# flag (default recommended value)
PNG_Z_BEST_COMPRESSION = 0x0009 # save using ZLib level 9 compression flag
# (default value is 6)
PNG_Z_NO_COMPRESSION = 0x0100 # save without ZLib compression
PNG_INTERLACED = 0x0200 # save using Adam7 interlacing (use | to combine
# with other save flags)
PNM_DEFAULT = 0
PNM_SAVE_RAW = 0 # Writer saves in RAW format (i.e. P4, P5 or P6)
PNM_SAVE_ASCII = 1 # Writer saves in ASCII format (i.e. P1, P2 or P3)
PSD_DEFAULT = 0
PSD_CMYK = 1 # reads tags for separated CMYK (default is conversion to RGB)
PSD_LAB = 2 # reads tags for CIELab (default is conversion to RGB)
RAS_DEFAULT = 0
RAW_DEFAULT = 0 # load the file as linear RGB 48-bit
RAW_PREVIEW = 1 # try to load the embedded JPEG preview with included
# Exif Data or default to RGB 24-bit
RAW_DISPLAY = 2 # load the file as RGB 24-bit
SGI_DEFAULT = 0
TARGA_DEFAULT = 0
TARGA_LOAD_RGB888 = 1 # Convert RGB555 and ARGB8888 -> RGB888.
TARGA_SAVE_RLE = 2 # Save with RLE compression
TIFF_DEFAULT = 0
TIFF_CMYK = 0x0001 # reads/stores tags for separated CMYK
# (use | to combine with compression flags)
TIFF_PACKBITS = 0x0100 # save using PACKBITS compression
TIFF_DEFLATE = 0x0200 # save using DEFLATE (a.k.a. ZLIB) compression
TIFF_ADOBE_DEFLATE = 0x0400 # save using ADOBE DEFLATE compression
TIFF_NONE = 0x0800 # save without any compression
TIFF_CCITTFAX3 = 0x1000 # save using CCITT Group 3 fax encoding
TIFF_CCITTFAX4 = 0x2000 # save using CCITT Group 4 fax encoding
TIFF_LZW = 0x4000 # save using LZW compression
TIFF_JPEG = 0x8000 # save using JPEG compression
TIFF_LOGLUV = 0x10000 # save using LogLuv compression
WBMP_DEFAULT = 0
XBM_DEFAULT = 0
XPM_DEFAULT = 0
class METADATA_MODELS(object):
FIMD_COMMENTS = 0
FIMD_EXIF_MAIN = 1
FIMD_EXIF_EXIF = 2
FIMD_EXIF_GPS = 3
FIMD_EXIF_MAKERNOTE = 4
FIMD_EXIF_INTEROP = 5
FIMD_IPTC = 6
FIMD_XMP = 7
FIMD_GEOTIFF = 8
FIMD_ANIMATION = 9
class METADATA_DATATYPE(object):
FIDT_BYTE = 1 # 8-bit unsigned integer
FIDT_ASCII = 2 # 8-bit bytes w/ last byte null
FIDT_SHORT = 3 # 16-bit unsigned integer
FIDT_LONG = 4 # 32-bit unsigned integer
FIDT_RATIONAL = 5 # 64-bit unsigned fraction
FIDT_SBYTE = 6 # 8-bit signed integer
FIDT_UNDEFINED = 7 # 8-bit untyped data
FIDT_SSHORT = 8 # 16-bit signed integer
FIDT_SLONG = 9 # 32-bit signed integer
FIDT_SRATIONAL = 10 # 64-bit signed fraction
FIDT_FLOAT = 11 # 32-bit IEEE floating point
FIDT_DOUBLE = 12 # 64-bit IEEE floating point
FIDT_IFD = 13 # 32-bit unsigned integer (offset)
FIDT_PALETTE = 14 # 32-bit RGBQUAD
FIDT_LONG8 = 16 # 64-bit unsigned integer
FIDT_SLONG8 = 17 # 64-bit signed integer
FIDT_IFD8 = 18 # 64-bit unsigned integer (offset)
dtypes = {
FIDT_BYTE: numpy.uint8,
FIDT_SHORT: numpy.uint16,
FIDT_LONG: numpy.uint32,
FIDT_RATIONAL: [('numerator', numpy.uint32),
('denominator', numpy.uint32)],
FIDT_SBYTE: numpy.int8,
FIDT_UNDEFINED: numpy.uint8,
FIDT_SSHORT: numpy.int16,
FIDT_SLONG: numpy.int32,
FIDT_SRATIONAL: [('numerator', numpy.int32),
('denominator', numpy.int32)],
FIDT_FLOAT: numpy.float32,
FIDT_DOUBLE: numpy.float64,
FIDT_IFD: numpy.uint32,
FIDT_PALETTE: [('R', numpy.uint8), ('G', numpy.uint8),
('B', numpy.uint8), ('A', numpy.uint8)],
FIDT_LONG8: numpy.uint64,
FIDT_SLONG8: numpy.int64,
FIDT_IFD8: numpy.uint64
}
def _process_bitmap(filename, flags, process_func):
filename = asbytes(filename)
ftype = _FI.FreeImage_GetFileType(filename, 0)
if ftype == -1:
raise ValueError('Cannot determine type of file %s' % filename)
bitmap = _FI.FreeImage_Load(ftype, filename, flags)
bitmap = ctypes.c_void_p(bitmap)
if not bitmap:
raise ValueError('Could not load file %s' % filename)
try:
return process_func(bitmap)
finally:
_FI.FreeImage_Unload(bitmap)
def read(filename, flags=0):
"""Read an image to a numpy array of shape (height, width) for
greyscale images, or shape (height, width, nchannels) for RGB or
RGBA images.
The `flags` parameter should be one or more values from the IO_FLAGS
class defined in this module, or-ed together with | as appropriate.
(See the source-code comments for more details.)
"""
return _process_bitmap(filename, flags, _array_from_bitmap)
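# Illustrative call (the file name and flag combination are assumptions):
#     img = read('photo.jpg', IO_FLAGS.JPEG_ACCURATE | IO_FLAGS.JPEG_EXIFROTATE)
#     img.shape   # (height, width, 3) for an RGB JPEG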
def read_metadata(filename):
"""Return a dict containing all image metadata.
Returned dict maps (metadata_model, tag_name) keys to tag values, where
metadata_model is a string name based on the FreeImage "metadata models"
defined in the class METADATA_MODELS.
"""
flags = IO_FLAGS.FIF_LOAD_NOPIXELS
return _process_bitmap(filename, flags, _read_metadata)
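# Sketch of the returned mapping (the file and tag names are examples; actual
# keys depend on what metadata the file carries):
#     meta = read_metadata('photo.jpg')
#     meta.get(('EXIF_MAIN', 'DateTime'))   # e.g. u'2011:01:01 12:00:00'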
def _process_multipage(filename, flags, process_func):
filename = asbytes(filename)
ftype = _FI.FreeImage_GetFileType(filename, 0)
if ftype == -1:
raise ValueError('Cannot determine type of file %s' % filename)
create_new = False
read_only = True
keep_cache_in_memory = True
multibitmap = _FI.FreeImage_OpenMultiBitmap(ftype, filename, create_new,
read_only, keep_cache_in_memory,
flags)
multibitmap = ctypes.c_void_p(multibitmap)
if not multibitmap:
raise ValueError('Could not open %s as multi-page image.' % filename)
try:
pages = _FI.FreeImage_GetPageCount(multibitmap)
out = []
for i in range(pages):
bitmap = _FI.FreeImage_LockPage(multibitmap, i)
bitmap = ctypes.c_void_p(bitmap)
if not bitmap:
raise ValueError('Could not open %s as a multi-page image.'
% filename)
try:
out.append(process_func(bitmap))
finally:
_FI.FreeImage_UnlockPage(multibitmap, bitmap, False)
return out
finally:
_FI.FreeImage_CloseMultiBitmap(multibitmap, 0)
def read_multipage(filename, flags=0):
"""Read a multipage image to a list of numpy arrays, where each
array is of shape (height, width) for greyscale images, or shape
(height, width, nchannels) for RGB or RGBA images.
The `flags` parameter should be one or more values from the IO_FLAGS
class defined in this module, or-ed together with | as appropriate.
(See the source-code comments for more details.)
"""
return _process_multipage(filename, flags, _array_from_bitmap)
def read_multipage_metadata(filename):
"""Read a multipage image to a list of metadata dicts, one dict for each
page. The dict format is as in read_metadata().
"""
flags = IO_FLAGS.FIF_LOAD_NOPIXELS
return _process_multipage(filename, flags, _read_metadata)
def _wrap_bitmap_bits_in_array(bitmap, shape, dtype):
"""Return an ndarray view on the data in a FreeImage bitmap. Only
valid for as long as the bitmap is loaded (if single page) / locked
in memory (if multipage).
"""
pitch = _FI.FreeImage_GetPitch(bitmap)
height = shape[-1]
byte_size = height * pitch
itemsize = dtype.itemsize
if len(shape) == 3:
strides = (itemsize, shape[0] * itemsize, pitch)
else:
strides = (itemsize, pitch)
bits = _FI.FreeImage_GetBits(bitmap)
array = numpy.ndarray(shape, dtype=dtype,
buffer=(ctypes.c_char * byte_size).from_address(bits),
strides=strides)
return array
def _array_from_bitmap(bitmap):
"""Convert a FreeImage bitmap pointer to a numpy array.
"""
dtype, shape = FI_TYPES.get_type_and_shape(bitmap)
array = _wrap_bitmap_bits_in_array(bitmap, shape, dtype)
# swizzle the color components and flip the scanlines to go from
# FreeImage's BGR[A] and upside-down internal memory format to something
# more normal
def n(arr):
return arr[..., ::-1].T
if len(shape) == 3 and _FI.FreeImage_IsLittleEndian() and \
dtype.type == numpy.uint8:
b = n(array[0])
g = n(array[1])
r = n(array[2])
if shape[0] == 3:
return numpy.dstack((r, g, b))
elif shape[0] == 4:
a = n(array[3])
return numpy.dstack((r, g, b, a))
else:
raise ValueError('Cannot handle images of shape %s' % shape)
# We need to copy because array does *not* own its memory
# after bitmap is freed.
return n(array).copy()
def _read_metadata(bitmap):
metadata = {}
models = [(name[5:], number) for name, number in
METADATA_MODELS.__dict__.items() if name.startswith('FIMD_')]
tag = ctypes.c_void_p()
for model_name, number in models:
mdhandle = _FI.FreeImage_FindFirstMetadata(number, bitmap,
ctypes.byref(tag))
mdhandle = ctypes.c_void_p(mdhandle)
if mdhandle:
more = True
while more:
tag_name = asstr(_FI.FreeImage_GetTagKey(tag))
tag_type = _FI.FreeImage_GetTagType(tag)
byte_size = _FI.FreeImage_GetTagLength(tag)
char_ptr = ctypes.c_char * byte_size
tag_str = char_ptr.from_address(_FI.FreeImage_GetTagValue(tag))
if tag_type == METADATA_DATATYPE.FIDT_ASCII:
tag_val = asstr(tag_str.value)
else:
tag_val = numpy.fromstring(tag_str,
dtype=METADATA_DATATYPE.dtypes[tag_type])
if len(tag_val) == 1:
tag_val = tag_val[0]
metadata[(model_name, tag_name)] = tag_val
more = _FI.FreeImage_FindNextMetadata(mdhandle, ctypes.byref(tag))
_FI.FreeImage_FindCloseMetadata(mdhandle)
return metadata
def write(array, filename, flags=0):
"""Write a (height, width) or (height, width, nchannels) array to
a greyscale, RGB, or RGBA image, with file type deduced from the
filename.
The `flags` parameter should be one or more values from the IO_FLAGS
class defined in this module, or-ed together with | as appropriate.
(See the source-code comments for more details.)
"""
array = numpy.asarray(array)
filename = asbytes(filename)
ftype = _FI.FreeImage_GetFIFFromFilename(filename)
if ftype == -1:
raise ValueError('Cannot determine type for %s' % filename)
bitmap, fi_type = _array_to_bitmap(array)
try:
if fi_type == FI_TYPES.FIT_BITMAP:
can_write = _FI.FreeImage_FIFSupportsExportBPP(ftype,
_FI.FreeImage_GetBPP(bitmap))
else:
can_write = _FI.FreeImage_FIFSupportsExportType(ftype, fi_type)
if not can_write:
raise TypeError('Cannot save image of this format '
'to this file type')
res = _FI.FreeImage_Save(ftype, bitmap, filename, flags)
if not res:
raise RuntimeError('Could not save image properly.')
finally:
_FI.FreeImage_Unload(bitmap)
def write_multipage(arrays, filename, flags=0):
"""Write a list of (height, width) or (height, width, nchannels)
arrays to a multipage greyscale, RGB, or RGBA image, with file type
deduced from the filename.
The `flags` parameter should be one or more values from the IO_FLAGS
class defined in this module, or-ed together with | as appropriate.
(See the source-code comments for more details.)
"""
filename = asbytes(filename)
ftype = _FI.FreeImage_GetFIFFromFilename(filename)
if ftype == -1:
raise ValueError('Cannot determine type of file %s' % filename)
create_new = True
read_only = False
keep_cache_in_memory = True
multibitmap = _FI.FreeImage_OpenMultiBitmap(ftype, filename,
create_new, read_only,
keep_cache_in_memory, 0)
if not multibitmap:
raise ValueError('Could not open %s for writing multi-page image.' %
filename)
try:
for array in arrays:
array = numpy.asarray(array)
bitmap, fi_type = _array_to_bitmap(array)
_FI.FreeImage_AppendPage(multibitmap, bitmap)
finally:
_FI.FreeImage_CloseMultiBitmap(multibitmap, flags)
# 4-byte quads of 0,v,v,v from 0,0,0,0 to 0,255,255,255
_GREY_PALETTE = numpy.arange(0, 0x01000000, 0x00010101, dtype=numpy.uint32)
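# Arithmetic check: grey value v maps to v * 0x00010101, i.e. v repeated in
# each of the B, G and R bytes, so _GREY_PALETTE[255] == 0x00FFFFFF (white).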
def _array_to_bitmap(array):
"""Allocate a FreeImage bitmap and copy a numpy array into it.
"""
shape = array.shape
dtype = array.dtype
r, c = shape[:2]
if len(shape) == 2:
n_channels = 1
w_shape = (c, r)
elif len(shape) == 3:
n_channels = shape[2]
w_shape = (n_channels, c, r)
else:
n_channels = shape[0]
try:
fi_type = FI_TYPES.fi_types[(dtype, n_channels)]
except KeyError:
raise ValueError('Cannot write arrays of given type and shape.')
itemsize = array.dtype.itemsize
bpp = 8 * itemsize * n_channels
bitmap = _FI.FreeImage_AllocateT(fi_type, c, r, bpp, 0, 0, 0)
bitmap = ctypes.c_void_p(bitmap)
if not bitmap:
raise RuntimeError('Could not allocate image for storage')
try:
def n(arr): # normalise to freeimage's in-memory format
return arr.T[:, ::-1]
wrapped_array = _wrap_bitmap_bits_in_array(bitmap, w_shape, dtype)
# swizzle the color components and flip the scanlines to go to
# FreeImage's BGR[A] and upside-down internal memory format
if len(shape) == 3 and _FI.FreeImage_IsLittleEndian() and \
dtype.type == numpy.uint8:
wrapped_array[0] = n(array[:, :, 2])
wrapped_array[1] = n(array[:, :, 1])
wrapped_array[2] = n(array[:, :, 0])
if shape[2] == 4:
wrapped_array[3] = n(array[:, :, 3])
else:
wrapped_array[:] = n(array)
if len(shape) == 2 and dtype.type == numpy.uint8:
palette = _FI.FreeImage_GetPalette(bitmap)
palette = ctypes.c_void_p(palette)
if not palette:
raise RuntimeError('Could not get image palette')
ctypes.memmove(palette, _GREY_PALETTE.ctypes.data, 1024)
return bitmap, fi_type
except:
_FI.FreeImage_Unload(bitmap)
raise
def imread(filename):
"""
img = imread(filename)
Reads an image from file `filename`
Parameters
----------
filename : file name
Returns
-------
img : ndarray
"""
img = read(filename)
return img
def imsave(filename, img):
'''
imsave(filename, img)
Save image to disk
Image type is inferred from filename
Parameters
----------
filename : file name
img : image to be saved as nd array
'''
write(img, filename)
| emmanuelle/scikits.image | skimage/io/_plugins/freeimage_plugin.py | Python | bsd-3-clause | 26,740 |
import re
import requests
from bs4 import BeautifulSoup
from cloudbot import hook
from cloudbot.util import web, formatting
# CONSTANTS
steam_re = re.compile('.*://store.steampowered.com/app/([0-9]+)', re.I)
API_URL = "https://store.steampowered.com/api/appdetails/"
STORE_URL = "https://store.steampowered.com/app/{}/"
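# Example match for the regex above (the URL is illustrative):
#     steam_re.match('https://store.steampowered.com/app/620/Portal_2/')
# yields '620' from group(1), which format_game() below turns into a summary.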
# OTHER FUNCTIONS
def format_game(app_id, show_url=True):
"""
Takes a Steam Store app ID and returns a formatted string with data about that app ID
:type app_id: string
:return: string
"""
params = {'appids': app_id}
try:
request = requests.get(API_URL, params=params, timeout=15)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
return "Could not get game info: {}".format(e)
data = request.json()
game = data[app_id]["data"]
# basic info
out = ["{} [h3]({})[/h3]".format(game['name'], game['release_date']['date'])]
# genres
try:
genres = ", ".join([g['description'] for g in game['genres']])
out.append(genres)
except KeyError:
# some things have no genre
pass
# pricing
if game['is_free']:
out.append("\x02Free\x02")
elif game.get("price_overview"):
price = game['price_overview']
price_now = "{}\x02{}\x02".format(price['currency'], price['final_formatted'])
if price['final'] == price['initial']:
out.append(price_now)
else:
out.append("{} ($(green)-{}%$(c) from {})".format(price_now, price['discount_percent'], price['initial_formatted']))
# else: game has no pricing, it's probably not released yet
out.append(formatting.truncate(game['short_description'], 250))
if show_url:
out.append("[h3]{}[/h3]".format(STORE_URL.format(game['steam_appid'])))
return "[h1]Steam:[/h1] " + " [div] ".join(out)
# HOOK FUNCTIONS
@hook.command()
def steam(text, reply):
"""<query> - Search for specified game/trailer/DLC"""
params = {'term': text}
try:
request = requests.get("https://store.steampowered.com/search/", params=params)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
reply("Could not get game info: {}".format(e))
raise
    soup = BeautifulSoup(request.text, "html.parser")
result = soup.find('a', {'class': 'search_result_row'})
if not result:
return "No game found."
app_id = result['data-ds-appid']
return format_game(app_id)
@hook.regex(steam_re)
def steam_url(match):
app_id = match.group(1)
return format_game(app_id, show_url=False)
| valesi/CloudBot | plugins/steam_store.py | Python | gpl-3.0 | 2,718 |
# This can be used to verify if a file contains a word or phrase
import sys,os
import datetime
import gzip
total_count=0
found_count=0
gzipfile=0
regularfile=0
KeyWord=str(raw_input("Enter the Key-Word to search for: "))
DateAfter=raw_input("Enter the date [Logs before this date will be ignored] in DD-MM-YYYY format : ")
Start_date=datetime.datetime.strptime(DateAfter, "%d-%m-%Y").date()
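# e.g. "25-12-2020" parses to datetime.date(2020, 12, 25)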
print "Given Date : " + str(Start_date)
today = datetime.date.today()
print "Current Date " + str(today)
if Start_date > today:
    print "Check the date entered. It is greater than today's date. Quitting!"
    sys.exit()
path = "/a/logs/netdeploy"
for path, subdirs, files in os.walk(path):
for name in files:
full_path = os.path.join(path, name)
#print full_path
        # THIS WILL GIVE UNIX TIME: print os.path.getmtime(os.path.join(path, name))
time_stamp_of_file = datetime.datetime.utcfromtimestamp(os.path.getmtime(full_path)).strftime('%Y-%m-%d %H:%M:%S')
File_Date=datetime.datetime.strptime(time_stamp_of_file,"%Y-%m-%d %H:%M:%S").date()
if File_Date > Start_date :
total_count=total_count+1
#print File_Date
#print full_path
if full_path.endswith(".gz"):
f = gzip.open(full_path, 'r')
file_content = f.read()
if KeyWord in file_content:
print "Log File : " + str(full_path)
print "File_date :" + str(File_Date)
found_count=found_count+1
gzipfile=gzipfile+1
#print file_content
f.close()
#sys.exit()
else :
f = open(full_path, 'r')
file_content = f.read()
if KeyWord in file_content:
print "Log File : " + str(full_path)
print "File_date :" + str(File_Date)
regularfile=regularfile+1
found_count=found_count+1
#print file_content
f.close()
#sys.exit()
print "Total Logs that were there after the given date : " + str(total_count)
print "Total number of Logs where Key-Word was found : " + str(found_count)
print "Gzipped Files :" + str(gzipfile)
print "Regular Files :" + str(regularfile)
| satheeshgopalan/python | keyword_in_files.py | Python | mit | 2,592 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GoGrid driver
"""
import time
import hashlib
import copy
from libcloud.utils.py3 import b
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.gogrid import GoGridConnection, BaseGoGridDriver
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver
from libcloud.compute.base import NodeSize, NodeImage, NodeLocation
STATE = {
"Starting": NodeState.PENDING,
"On": NodeState.RUNNING,
"On/Saving": NodeState.RUNNING,
"Off": NodeState.PENDING,
"Restarting": NodeState.REBOOTING,
"Saving": NodeState.PENDING,
"Restoring": NodeState.PENDING,
}
GOGRID_INSTANCE_TYPES = {
'512MB': {'id': '512MB',
'name': '512MB',
'ram': 512,
'disk': 30,
'bandwidth': None},
'1GB': {'id': '1GB',
'name': '1GB',
'ram': 1024,
'disk': 60,
'bandwidth': None},
'2GB': {'id': '2GB',
'name': '2GB',
'ram': 2048,
'disk': 120,
'bandwidth': None},
'4GB': {'id': '4GB',
'name': '4GB',
'ram': 4096,
'disk': 240,
'bandwidth': None},
'8GB': {'id': '8GB',
'name': '8GB',
'ram': 8192,
'disk': 480,
'bandwidth': None},
'16GB': {'id': '16GB',
'name': '16GB',
'ram': 16384,
'disk': 960,
'bandwidth': None},
'24GB': {'id': '24GB',
'name': '24GB',
'ram': 24576,
'disk': 960,
'bandwidth': None},
}
class GoGridNode(Node):
# Generating uuid based on public ip to get around missing id on
# create_node in gogrid api
#
# Used public ip since it is not mutable and specified at create time,
# so uuid of node should not change after add is completed
def get_uuid(self):
return hashlib.sha1(
b("%s:%d" % (self.public_ips, self.driver.type))
).hexdigest()
class GoGridNodeDriver(BaseGoGridDriver, NodeDriver):
"""
GoGrid node driver
"""
connectionCls = GoGridConnection
type = Provider.GOGRID
api_name = 'gogrid'
name = 'GoGrid'
website = 'http://www.gogrid.com/'
features = {"create_node": ["generates_password"]}
_instance_types = GOGRID_INSTANCE_TYPES
def __init__(self, *args, **kwargs):
"""
@inherits: L{NodeDriver.__init__}
"""
super(GoGridNodeDriver, self).__init__(*args, **kwargs)
def _get_state(self, element):
        try:
            return STATE[element['state']['name']]
        except KeyError:
            return NodeState.UNKNOWN
def _get_ip(self, element):
return element.get('ip').get('ip')
def _get_id(self, element):
return element.get('id')
def _to_node(self, element, password=None):
state = self._get_state(element)
ip = self._get_ip(element)
id = self._get_id(element)
n = GoGridNode(id=id,
name=element['name'],
state=state,
public_ips=[ip],
private_ips=[],
extra={'ram': element.get('ram').get('name'),
'description': element.get('description', '')},
driver=self.connection.driver)
if password:
n.extra['password'] = password
return n
def _to_image(self, element):
n = NodeImage(id=element['id'],
name=element['friendlyName'],
driver=self.connection.driver)
return n
def _to_images(self, object):
return [self._to_image(el)
for el in object['list']]
def _to_location(self, element):
location = NodeLocation(id=element['id'],
name=element['name'],
country="US",
driver=self.connection.driver)
return location
def _to_locations(self, object):
return [self._to_location(el)
for el in object['list']]
def list_images(self, location=None):
params = {}
if location is not None:
params["datacenter"] = location.id
images = self._to_images(
self.connection.request('/api/grid/image/list', params).object)
return images
def list_nodes(self):
"""
@inherits: L{NodeDriver.list_nodes}
@rtype: C{list} of L{GoGridNode}
"""
passwords_map = {}
res = self._server_list()
try:
for password in self._password_list()['list']:
try:
passwords_map[password['server']['id']] = \
password['password']
except KeyError:
pass
except InvalidCredsError:
# some gogrid API keys don't have permission to access the
# password list.
pass
return [self._to_node(el, passwords_map.get(el.get('id')))
for el in res['list']]
def reboot_node(self, node):
"""
@inherits: L{NodeDriver.reboot_node}
@type node: L{GoGridNode}
"""
id = node.id
power = 'restart'
res = self._server_power(id, power)
if not res.success():
raise Exception(res.parse_error())
return True
def destroy_node(self, node):
"""
@inherits: L{NodeDriver.reboot_node}
@type node: L{GoGridNode}
"""
id = node.id
res = self._server_delete(id)
if not res.success():
raise Exception(res.parse_error())
return True
def _server_list(self):
return self.connection.request('/api/grid/server/list').object
def _password_list(self):
return self.connection.request('/api/support/password/list').object
def _server_power(self, id, power):
# power in ['start', 'stop', 'restart']
params = {'id': id, 'power': power}
return self.connection.request("/api/grid/server/power", params,
method='POST')
def _server_delete(self, id):
params = {'id': id}
return self.connection.request("/api/grid/server/delete", params,
method='POST')
def _get_first_ip(self, location=None):
ips = self.ex_list_ips(public=True, assigned=False, location=location)
try:
return ips[0].ip
except IndexError:
raise LibcloudError('No public unassigned IPs left',
GoGridNodeDriver)
def list_sizes(self, location=None):
sizes = []
for key, values in self._instance_types.items():
attributes = copy.deepcopy(values)
attributes.update({'price': self._get_size_price(size_id=key)})
sizes.append(NodeSize(driver=self.connection.driver, **attributes))
return sizes
def list_locations(self):
locations = self._to_locations(
self.connection.request('/api/common/lookup/list',
params={'lookup': 'ip.datacenter'}).object)
return locations
def ex_create_node_nowait(self, **kwargs):
"""Don't block until GoGrid allocates id for a node
but return right away with id == None.
The existance of this method is explained by the fact
that GoGrid assigns id to a node only few minutes after
creation.
@keyword name: String with a name for this new node (required)
@type name: C{str}
        @keyword size: The size of resources allocated to this node.
(required)
@type size: L{NodeSize}
@keyword image: OS Image to boot on node. (required)
@type image: L{NodeImage}
@keyword ex_description: Description of a Node
@type ex_description: C{str}
@keyword ex_ip: Public IP address to use for a Node. If not
specified, first available IP address will be picked
@type ex_ip: C{str}
@rtype: L{GoGridNode}
"""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
try:
ip = kwargs['ex_ip']
except KeyError:
ip = self._get_first_ip(kwargs.get('location'))
params = {'name': name,
'image': image.id,
'description': kwargs.get('ex_description', ''),
'server.ram': size.id,
'ip': ip}
object = self.connection.request('/api/grid/server/add',
params=params, method='POST').object
node = self._to_node(object['list'][0])
return node
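    # Illustrative flow (driver/image/size objects are hypothetical): the node
    # returned here typically has id == None until GoGrid finishes allocation;
    # create_node() below polls list_nodes() until the id appears.
    #     node = driver.ex_create_node_nowait(name='www1', image=image, size=size)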
def create_node(self, **kwargs):
"""Create a new GoGird node
@inherits: L{NodeDriver.create_node}
@keyword ex_description: Description of a Node
@type ex_description: C{str}
@keyword ex_ip: Public IP address to use for a Node. If not
specified, first available IP address will be picked
@type ex_ip: C{str}
@rtype: L{GoGridNode}
"""
node = self.ex_create_node_nowait(**kwargs)
timeout = 60 * 20
waittime = 0
interval = 2 * 60
while node.id is None and waittime < timeout:
nodes = self.list_nodes()
for i in nodes:
if i.public_ips[0] == node.public_ips[0] and i.id is not None:
return i
waittime += interval
time.sleep(interval)
        if node.id is None:
            raise Exception(
                "Wasn't able to wait for id allocation for the node %s"
                % str(node))
return node
def ex_save_image(self, node, name):
"""Create an image for node.
Please refer to GoGrid documentation to get info
how prepare a node for image creation:
http://wiki.gogrid.com/wiki/index.php/MyGSI
@keyword node: node to use as a base for image
@type node: L{GoGridNode}
@keyword name: name for new image
@type name: C{str}
@rtype: L{NodeImage}
"""
params = {'server': node.id,
'friendlyName': name}
object = self.connection.request('/api/grid/image/save', params=params,
method='POST').object
return self._to_images(object)[0]
def ex_edit_node(self, **kwargs):
"""Change attributes of a node.
@keyword node: node to be edited (required)
@type node: L{GoGridNode}
@keyword size: new size of a node (required)
@type size: L{NodeSize}
@keyword ex_description: new description of a node
@type ex_description: C{str}
@rtype: L{Node}
"""
node = kwargs['node']
size = kwargs['size']
params = {'id': node.id,
'server.ram': size.id}
if 'ex_description' in kwargs:
params['description'] = kwargs['ex_description']
object = self.connection.request('/api/grid/server/edit',
params=params).object
return self._to_node(object['list'][0])
def ex_edit_image(self, **kwargs):
"""Edit metadata of a server image.
@keyword image: image to be edited (required)
@type image: L{NodeImage}
@keyword public: should be the image public (required)
@type public: C{bool}
@keyword ex_description: description of the image (optional)
@type ex_description: C{str}
@keyword name: name of the image
        @type name: C{str}
@rtype: L{NodeImage}
"""
image = kwargs['image']
public = kwargs['public']
params = {'id': image.id,
'isPublic': str(public).lower()}
if 'ex_description' in kwargs:
params['description'] = kwargs['ex_description']
if 'name' in kwargs:
params['friendlyName'] = kwargs['name']
object = self.connection.request('/api/grid/image/edit',
params=params).object
return self._to_image(object['list'][0])
def ex_list_ips(self, **kwargs):
"""Return list of IP addresses assigned to
the account.
        @keyword public: set to True to list only
                         public IPs or False to list only
                         private IPs. Set to None or omit it
                         to skip filtering by type
        @type public: C{bool}
        @keyword assigned: set to True to list only addresses
                           assigned to servers, False to list unassigned
                           addresses, and set to None or omit it
                           to skip filtering by state
@type assigned: C{bool}
@keyword location: filter IP addresses by location
@type location: L{NodeLocation}
@rtype: C{list} of L{GoGridIpAddress}
"""
params = {}
if "public" in kwargs and kwargs["public"] is not None:
params["ip.type"] = {True: "Public",
False: "Private"}[kwargs["public"]]
if "assigned" in kwargs and kwargs["assigned"] is not None:
params["ip.state"] = {True: "Assigned",
False: "Unassigned"}[kwargs["assigned"]]
if "location" in kwargs and kwargs['location'] is not None:
params['datacenter'] = kwargs['location'].id
ips = self._to_ips(
self.connection.request('/api/grid/ip/list',
params=params).object)
return ips
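    # Example filter combinations (a sketch; ``driver`` is a hypothetical
    # GoGridNodeDriver instance):
    #     driver.ex_list_ips(public=True, assigned=False)  # free public IPs
    #     driver.ex_list_ips(public=False)                 # all private IPs
    #     driver.ex_list_ips()                             # no filtering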
| mihaisoloi/conpaas | conpaas-services/contrib/libcloud/compute/drivers/gogrid.py | Python | bsd-3-clause | 14,949 |
#!/usr/bin/python
#
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_serverautoscalepolicy
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of ServerAutoScalePolicy Avi RESTful Object
description:
- This module is used to configure ServerAutoScalePolicy object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
description:
description:
- User defined description for the object.
intelligent_autoscale:
description:
- Use avi intelligent autoscale algorithm where autoscale is performed by comparing load on the pool against estimated capacity of all the servers.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
intelligent_scalein_margin:
description:
- Maximum extra capacity as percentage of load used by the intelligent scheme.
- Scalein is triggered when available capacity is more than this margin.
- Allowed values are 1-99.
- Default value when not specified in API or module is interpreted by Avi Controller as 40.
intelligent_scaleout_margin:
description:
- Minimum extra capacity as percentage of load used by the intelligent scheme.
- Scaleout is triggered when available capacity is less than this margin.
- Allowed values are 1-99.
- Default value when not specified in API or module is interpreted by Avi Controller as 20.
max_scalein_adjustment_step:
description:
- Maximum number of servers to scalein simultaneously.
- The actual number of servers to scalein is chosen such that target number of servers is always more than or equal to the min_size.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.
max_scaleout_adjustment_step:
description:
- Maximum number of servers to scaleout simultaneously.
- The actual number of servers to scaleout is chosen such that target number of servers is always less than or equal to the max_size.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.
max_size:
description:
- Maximum number of servers after scaleout.
- Allowed values are 0-400.
min_size:
description:
- No scale-in happens once number of operationally up servers reach min_servers.
- Allowed values are 0-400.
name:
description:
- Name of the object.
required: true
scalein_alertconfig_refs:
description:
- Trigger scalein when alerts due to any of these alert configurations are raised.
- It is a reference to an object of type alertconfig.
scalein_cooldown:
description:
- Cooldown period during which no new scalein is triggered to allow previous scalein to successfully complete.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
- Units(SEC).
scaleout_alertconfig_refs:
description:
- Trigger scaleout when alerts due to any of these alert configurations are raised.
- It is a reference to an object of type alertconfig.
scaleout_cooldown:
description:
- Cooldown period during which no new scaleout is triggered to allow previous scaleout to successfully complete.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
- Units(SEC).
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
use_predicted_load:
description:
- Use predicted load rather than current load.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create ServerAutoScalePolicy object
avi_serverautoscalepolicy:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_serverautoscalepolicy
"""
RETURN = '''
obj:
description: ServerAutoScalePolicy (api/serverautoscalepolicy) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
description=dict(type='str',),
intelligent_autoscale=dict(type='bool',),
intelligent_scalein_margin=dict(type='int',),
intelligent_scaleout_margin=dict(type='int',),
max_scalein_adjustment_step=dict(type='int',),
max_scaleout_adjustment_step=dict(type='int',),
max_size=dict(type='int',),
min_size=dict(type='int',),
name=dict(type='str', required=True),
scalein_alertconfig_refs=dict(type='list',),
scalein_cooldown=dict(type='int',),
scaleout_alertconfig_refs=dict(type='list',),
scaleout_cooldown=dict(type='int',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
use_predicted_load=dict(type='bool',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'serverautoscalepolicy',
set([]))
if __name__ == '__main__':
main()
| rahushen/ansible | lib/ansible/modules/network/avi/avi_serverautoscalepolicy.py | Python | gpl-3.0 | 7,491 |
# pylint: disable-msg=W0614,W0401,W0611,W0622
# flake8: noqa
__docformat__ = 'restructuredtext'
# Let users know if they're missing any of our hard dependencies
hard_dependencies = ("numpy", "pytz", "dateutil")
missing_dependencies = []
for dependency in hard_dependencies:
try:
__import__(dependency)
except ImportError as e:
missing_dependencies.append(dependency)
if missing_dependencies:
raise ImportError(
"Missing required dependencies {0}".format(missing_dependencies))
del hard_dependencies, dependency, missing_dependencies
# numpy compat
from pandas.compat.numpy import *
try:
from pandas._libs import (hashtable as _hashtable,
lib as _lib,
tslib as _tslib)
except ImportError as e: # pragma: no cover
# hack but overkill to use re
module = str(e).replace('cannot import name ', '')
raise ImportError("C extension: {0} not built. If you want to import "
"pandas from the source directory, you may need to run "
"'python setup.py build_ext --inplace --force' to build "
"the C extensions first.".format(module))
from datetime import datetime
# let init-time option registration happen
import pandas.core.config_init
from pandas.core.api import *
from pandas.core.sparse.api import *
from pandas.stats.api import *
from pandas.tseries.api import *
from pandas.core.computation.api import *
from pandas.core.reshape.api import *
# deprecate tools.plotting, plot_params and scatter_matrix on the top namespace
import pandas.tools.plotting
plot_params = pandas.plotting._style._Options(deprecated=True)
# do not import deprecate to top namespace
scatter_matrix = pandas.util._decorators.deprecate(
'pandas.scatter_matrix', pandas.plotting.scatter_matrix,
'pandas.plotting.scatter_matrix')
from pandas.util._print_versions import show_versions
from pandas.io.api import *
from pandas.util._tester import test
import pandas.testing
# extension module deprecations
from pandas.util._depr_module import _DeprecatedModule
json = _DeprecatedModule(deprmod='pandas.json',
moved={'dumps': 'pandas.io.json.dumps',
'loads': 'pandas.io.json.loads'})
parser = _DeprecatedModule(deprmod='pandas.parser',
removals=['na_values'],
moved={'CParserError': 'pandas.errors.ParserError'})
lib = _DeprecatedModule(deprmod='pandas.lib', deprmodto=False,
moved={'Timestamp': 'pandas.Timestamp',
'Timedelta': 'pandas.Timedelta',
'NaT': 'pandas.NaT',
'infer_dtype': 'pandas.api.types.infer_dtype'})
tslib = _DeprecatedModule(deprmod='pandas.tslib',
moved={'Timestamp': 'pandas.Timestamp',
'Timedelta': 'pandas.Timedelta',
'NaT': 'pandas.NaT',
'NaTType': 'type(pandas.NaT)',
'OutOfBoundsDatetime': 'pandas.errors.OutOfBoundsDatetime'})
# use the closest tagged version if possible
from ._version import get_versions
v = get_versions()
__version__ = v.get('closest-tag', v['version'])
del get_versions, v
# module level doc-string
__doc__ = """
pandas - a powerful data analysis and manipulation library for Python
=====================================================================
**pandas** is a Python package providing fast, flexible, and expressive data
structures designed to make working with "relational" or "labeled" data both
easy and intuitive. It aims to be the fundamental high-level building block for
doing practical, **real world** data analysis in Python. Additionally, it has
the broader goal of becoming **the most powerful and flexible open source data
analysis / manipulation tool available in any language**. It is already well on
its way toward this goal.
Main Features
-------------
Here are just a few of the things that pandas does well:
- Easy handling of missing data in floating point as well as non-floating
point data
- Size mutability: columns can be inserted and deleted from DataFrame and
higher dimensional objects
- Automatic and explicit data alignment: objects can be explicitly aligned
to a set of labels, or the user can simply ignore the labels and let
`Series`, `DataFrame`, etc. automatically align the data for you in
computations
- Powerful, flexible group by functionality to perform split-apply-combine
operations on data sets, for both aggregating and transforming data
- Make it easy to convert ragged, differently-indexed data in other Python
and NumPy data structures into DataFrame objects
- Intelligent label-based slicing, fancy indexing, and subsetting of large
data sets
- Intuitive merging and joining data sets
- Flexible reshaping and pivoting of data sets
- Hierarchical labeling of axes (possible to have multiple labels per tick)
- Robust IO tools for loading data from flat files (CSV and delimited),
Excel files, databases, and saving/loading data from the ultrafast HDF5
format
- Time series-specific functionality: date range generation and frequency
conversion, moving window statistics, moving window linear regressions,
date shifting and lagging, etc.
"""
| NixaSoftware/CVis | venv/lib/python2.7/site-packages/pandas/__init__.py | Python | apache-2.0 | 5,466 |
import logging
import os
from lib.Settings import Settings
from lib.Wrappers.NullLogger import NullLogger
class Logger:
def __init__(self, name):
if 'UNITTESTING' in os.environ:
self.logging = NullLogger()
else:
settings = Settings().getSettings()
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=settings["logs"]["level"])
self.logging = logging.getLogger(name)
def debug(self, *args, **kwargs):
self.logging.debug(*args, **kwargs)
def info(self, *args, **kwargs):
self.logging.info(*args, **kwargs)
def warning(self, *args, **kwargs):
self.logging.warning(*args, **kwargs)
def error(self, *args, **kwargs):
self.logging.error(*args, **kwargs)
def critical(self, *args, **kwargs):
self.logging.critical(*args, **kwargs)
def log(self, *args, **kwargs):
self.logging.log(*args, **kwargs)
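# Minimal usage sketch (the logger name is arbitrary): outside of unit tests
# this configures the logging format once and proxies to a named logger.
#     log = Logger(__name__)
#     log.info("service started on port %s", 8080)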
| Open365/Open365 | lib/Wrappers/Logger.py | Python | agpl-3.0 | 1,007 |
from __future__ import absolute_import
from traits.testing.unittest_tools import unittest
from ..about_dialog import AboutDialog
from ..constant import OK, CANCEL
from ..gui import GUI
from ..toolkit import toolkit_object
from ..window import Window
ModalDialogTester = toolkit_object('util.modal_dialog_tester:ModalDialogTester')
no_modal_dialog_tester = (ModalDialogTester.__name__ == 'Unimplemented')
class TestAboutDialog(unittest.TestCase):
def setUp(self):
self.gui = GUI()
self.dialog = AboutDialog()
def test_create(self):
# test that creation and destruction works as expected
self.dialog._create()
self.gui.process_events()
self.dialog.destroy()
def test_destroy(self):
# test that destroy works even when no control
self.dialog.destroy()
def test_create_parent(self):
# test that creation and destruction works as expected with a parent
parent = Window()
self.dialog.parent = parent.control
parent._create()
self.dialog._create()
self.gui.process_events()
self.dialog.destroy()
parent.destroy()
def test_create_ok_renamed(self):
# test that creation and destruction works as expected with ok_label
self.dialog.ok_label = u"Sure"
self.dialog._create()
self.gui.process_events()
self.dialog.destroy()
@unittest.skipIf(no_modal_dialog_tester, 'ModalDialogTester unavailable')
def test_accept(self):
# test that accept works as expected
# XXX duplicate of Dialog test, not needed?
tester = ModalDialogTester(self.dialog.open)
tester.open_and_run(when_opened=lambda x: x.close(accept=True))
self.assertEqual(tester.result, OK)
self.assertEqual(self.dialog.return_code, OK)
@unittest.skipIf(no_modal_dialog_tester, 'ModalDialogTester unavailable')
def test_close(self):
# test that closing works as expected
# XXX duplicate of Dialog test, not needed?
tester = ModalDialogTester(self.dialog.open)
tester.open_and_run(when_opened=lambda x: self.dialog.close())
self.assertEqual(tester.result, CANCEL)
self.assertEqual(self.dialog.return_code, CANCEL)
@unittest.skipIf(no_modal_dialog_tester, 'ModalDialogTester unavailable')
def test_ok(self):
# test that OK works as expected
tester = ModalDialogTester(self.dialog.open)
tester.open_and_wait(when_opened=lambda x: x.click_button(OK))
self.assertEqual(tester.result, OK)
self.assertEqual(self.dialog.return_code, OK)
@unittest.skipIf(no_modal_dialog_tester, 'ModalDialogTester unavailable')
def test_renamed_ok(self):
self.dialog.ok_label = u"Sure"
# test that OK works as expected if renamed
tester = ModalDialogTester(self.dialog.open)
tester.open_and_wait(when_opened=lambda x: x.click_widget(u"Sure"))
self.assertEqual(tester.result, OK)
self.assertEqual(self.dialog.return_code, OK)
@unittest.skipIf(no_modal_dialog_tester, 'ModalDialogTester unavailable')
def test_parent(self):
# test that lifecycle works with a parent
parent = Window()
self.dialog.parent = parent.control
parent.open()
tester = ModalDialogTester(self.dialog.open)
tester.open_and_run(when_opened=lambda x: x.close(accept=True))
parent.close()
self.assertEqual(tester.result, OK)
self.assertEqual(self.dialog.return_code, OK)
| geggo/pyface | pyface/tests/test_about_dialog.py | Python | bsd-3-clause | 3,554 |
# Copyright 2019, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import pkg_resources
import typing
from typing import cast, Any, Callable, Optional, Sequence, Union
import warnings
from google.auth.credentials import AnonymousCredentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.subscriber import futures
from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager
from google.pubsub_v1.services.subscriber import client as subscriber_client
if typing.TYPE_CHECKING: # pragma: NO COVER
from google.cloud.pubsub_v1 import subscriber
from google.pubsub_v1.services.subscriber.transports.grpc import (
SubscriberGrpcTransport,
)
try:
__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version
except pkg_resources.DistributionNotFound:
# Distribution might not be available if we are not running from within
# a PIP package.
__version__ = "0.0"
class Client(subscriber_client.SubscriberClient):
"""A subscriber client for Google Cloud Pub/Sub.
This creates an object that is capable of subscribing to messages.
Generally, you can instantiate this client with no arguments, and you
get sensible defaults.
Args:
kwargs: Any additional arguments provided are sent as keyword
            arguments to the underlying
:class:`~google.cloud.pubsub_v1.gapic.subscriber_client.SubscriberClient`.
Generally you should not need to set additional keyword
arguments. Optionally, regional endpoints can be set via
``client_options`` that takes a single key-value pair that
defines the endpoint.
Example:
.. code-block:: python
from google.cloud import pubsub_v1
subscriber_client = pubsub_v1.SubscriberClient(
# Optional
client_options = {
"api_endpoint": REGIONAL_ENDPOINT
}
)
"""
def __init__(self, **kwargs: Any):
# Sanity check: Is our goal to use the emulator?
# If so, create a grpc insecure channel with the emulator host
# as the target.
if os.environ.get("PUBSUB_EMULATOR_HOST"):
kwargs["client_options"] = {
"api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST")
}
kwargs["credentials"] = AnonymousCredentials()
# Instantiate the underlying GAPIC client.
super().__init__(**kwargs)
self._target = self._transport._host
self._closed = False
@classmethod
def from_service_account_file( # type: ignore[override]
cls, filename: str, **kwargs: Any
) -> "Client":
"""Creates an instance of this client using the provided credentials
file.
Args:
filename: The path to the service account private key json file.
kwargs: Additional arguments to pass to the constructor.
Returns:
A Subscriber :class:`~google.cloud.pubsub_v1.subscriber.client.Client`
instance that is the constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(**kwargs)
from_service_account_json = from_service_account_file # type: ignore[assignment]
@property
def target(self) -> str:
"""Return the target (where the API is).
Returns:
The location of the API.
"""
return self._target
@property
def closed(self) -> bool:
"""Return whether the client has been closed and cannot be used anymore.
.. versionadded:: 2.8.0
"""
return self._closed
@property
def api(self):
"""The underlying gapic API client.
.. versionchanged:: 2.10.0
Instead of a GAPIC ``SubscriberClient`` client instance, this property is a
proxy object to it with the same interface.
.. deprecated:: 2.10.0
Use the GAPIC methods and properties on the client instance directly
instead of through the :attr:`api` attribute.
"""
msg = (
'The "api" property only exists for backward compatibility, access its '
        'attributes directly through the client instance (e.g. "client.foo" '
'instead of "client.api.foo").'
)
warnings.warn(msg, category=DeprecationWarning)
return super()
def subscribe(
self,
subscription: str,
callback: Callable[["subscriber.message.Message"], Any],
flow_control: Union[types.FlowControl, Sequence] = (),
scheduler: Optional["subscriber.scheduler.ThreadScheduler"] = None,
use_legacy_flow_control: bool = False,
await_callbacks_on_shutdown: bool = False,
) -> futures.StreamingPullFuture:
"""Asynchronously start receiving messages on a given subscription.
This method starts a background thread to begin pulling messages from
a Pub/Sub subscription and scheduling them to be processed using the
provided ``callback``.
The ``callback`` will be called with an individual
:class:`google.cloud.pubsub_v1.subscriber.message.Message`. It is the
responsibility of the callback to either call ``ack()`` or ``nack()``
        on the message when it has finished processing. If an exception occurs
        in the callback during processing, the exception is logged and the
        message is ``nack()``-ed.
        The ``flow_control`` argument can be used to control the rate at
which messages are pulled. The settings are relatively conservative by
default to prevent "message hoarding" - a situation where the client
        pulls a large number of messages but cannot process them fast enough,
leading it to "starve" other clients of messages. Increasing these
settings may lead to faster throughput for messages that do not take
a long time to process.
The ``use_legacy_flow_control`` argument disables enforcing flow control
settings at the Cloud Pub/Sub server, and only the client side flow control
will be enforced.
This method starts the receiver in the background and returns a
*Future* representing its execution. Waiting on the future (calling
``result()``) will block forever or until a non-recoverable error
is encountered (such as loss of network connectivity). Cancelling the
future will signal the process to shutdown gracefully and exit.
    .. note:: This uses Pub/Sub's *streaming pull* feature. This feature
        has properties that may be surprising. Please take a look at
https://cloud.google.com/pubsub/docs/pull#streamingpull for
more details on how streaming pull behaves compared to the
synchronous pull method.
Example:
.. code-block:: python
from google.cloud import pubsub_v1
subscriber_client = pubsub_v1.SubscriberClient()
# existing subscription
subscription = subscriber_client.subscription_path(
'my-project-id', 'my-subscription')
def callback(message):
print(message)
message.ack()
future = subscriber_client.subscribe(
subscription, callback)
try:
future.result()
except KeyboardInterrupt:
future.cancel() # Trigger the shutdown.
future.result() # Block until the shutdown is complete.
Args:
subscription:
The name of the subscription. The subscription should have already been
created (for example, by using :meth:`create_subscription`).
callback:
The callback function. This function receives the message as
its only argument and will be called from a different thread/
process depending on the scheduling strategy.
flow_control:
The flow control settings. Use this to prevent situations where you are
inundated with too many messages at once.
scheduler:
An optional *scheduler* to use when executing the callback. This
controls how callbacks are executed concurrently. This object must not
be shared across multiple ``SubscriberClient`` instances.
use_legacy_flow_control (bool):
If set to ``True``, flow control at the Cloud Pub/Sub server is disabled,
though client-side flow control is still enabled. If set to ``False``
(default), both server-side and client-side flow control are enabled.
await_callbacks_on_shutdown:
If ``True``, after canceling the returned future, the latter's
``result()`` method will block until the background stream and its
helper threads have been terminated, and all currently executing message
callbacks are done processing.
If ``False`` (default), the returned future's ``result()`` method will
not block after canceling the future. The method will instead return
immediately after the background stream and its helper threads have been
terminated, but some of the message callback threads might still be
running at that point.
Returns:
A future instance that can be used to manage the background stream.
"""
flow_control = types.FlowControl(*flow_control)
manager = streaming_pull_manager.StreamingPullManager(
self,
subscription,
flow_control=flow_control,
scheduler=scheduler,
use_legacy_flow_control=use_legacy_flow_control,
await_callbacks_on_shutdown=await_callbacks_on_shutdown,
)
future = futures.StreamingPullFuture(manager)
manager.open(callback=callback, on_callback_error=future.set_exception)
return future
def close(self) -> None:
"""Close the underlying channel to release socket resources.
After a channel has been closed, the client instance cannot be used
anymore.
This method is idempotent.
"""
transport = cast("SubscriberGrpcTransport", self._transport)
transport.grpc_channel.close()
self._closed = True
def __enter__(self) -> "Client":
if self._closed:
raise RuntimeError("Closed subscriber cannot be used as context manager.")
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
| googleapis/python-pubsub | google/cloud/pubsub_v1/subscriber/client.py | Python | apache-2.0 | 11,505 |
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PyTest configuration."""
import os
import shutil
import sys
import pytest
import rl_coach.tests.utils.args_utils as a_utils
import rl_coach.tests.utils.presets_utils as p_utils
from rl_coach.tests.utils.definitions import Definitions as Def
from os import path
@pytest.fixture(scope="module", params=list(p_utils.collect_presets()))
def preset_name(request):
"""
Return all preset names
"""
yield request.param
@pytest.fixture(scope="function", params=list(a_utils.collect_args()))
def flag(request):
"""
Return flags names in function scope
"""
yield request.param
@pytest.fixture(scope="module", params=list(a_utils.collect_preset_for_args()))
def preset_args(request):
"""
Return preset names that can be used for args testing only; working in
module scope
"""
yield request.param
@pytest.fixture(scope="module", params=list(a_utils.collect_preset_for_seed()))
def preset_args_for_seed(request):
"""
Return preset names that can be used for args testing only and for special
action when using seed argument; working in module scope
"""
yield request.param
@pytest.fixture(scope="module",
params=list(a_utils.collect_preset_for_mxnet()))
def preset_for_mxnet_args(request):
"""
Return preset names that can be used for args testing only; this special
fixture will be used for mxnet framework only. working in module scope
"""
yield request.param
@pytest.fixture(scope="function")
def clres(request):
"""
    Create both the csv file and the log file for testing
    :yield: class holding both file paths
"""
class CreateCsvLog:
"""
        Create the test csv and log file paths
"""
def __init__(self, csv, log, pattern):
self.exp_path = csv
self.stdout = open(log, 'w')
self.fn_pattern = pattern
@property
def experiment_path(self):
return self.exp_path
@property
def stdout_path(self):
return self.stdout
@experiment_path.setter
def experiment_path(self, val):
self.exp_path = val
@stdout_path.setter
def stdout_path(self, val):
self.stdout = open(val, 'w')
# get preset name from test request params
idx = 0 if 'preset' in list(request.node.funcargs.items())[0][0] else 1
p_name = list(request.node.funcargs.items())[idx][1]
p_valid_params = p_utils.validation_params(p_name)
sys.path.append('.')
test_name = 'ExpName_{}'.format(p_name)
test_path = os.path.join(Def.Path.experiments, test_name)
if path.exists(test_path):
shutil.rmtree(test_path)
# get the stdout for logs results
log_file_name = 'test_log_{}.txt'.format(p_name)
fn_pattern = '*.csv' if p_valid_params.num_workers > 1 else 'worker_0*.csv'
res = CreateCsvLog(test_path, log_file_name, fn_pattern)
yield res
# clean files
if path.exists(res.exp_path):
shutil.rmtree(res.exp_path)
if path.exists(res.stdout.name):
os.remove(res.stdout.name)
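# Hedged consumption sketch (hypothetical test, not part of this module):
# the fixtures above are requested by name, and `clres` expects a preset
# fixture to appear first in the test signature, e.g.
#
#     def test_preset_flag(preset_args, flag, clres):
#         # run coach with preset_args/flag, then assert on csv files
#         # matching clres.fn_pattern under clres.experiment_path
#         ...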
| NervanaSystems/coach | rl_coach/tests/conftest.py | Python | apache-2.0 | 3,681 |
from django.db import models
class Role(models.Model):
role_name = models.CharField(max_length=50)
active = models.BooleanField(default=1)
def __unicode__(self):
return self.role_name
class User(models.Model):
name = models.CharField(max_length=50)
user_type = models.ForeignKey(Role)
    db_insert_date = models.DateTimeField('date created')
    db_modify_date = models.DateTimeField('date modified')
db_modify_user = models.ForeignKey('self', null=True, blank=True)
active = models.BooleanField(default=1)
def __unicode__(self):
return self.name
class BookCategory(models.Model):
category_name = models.CharField(max_length=50)
category_description = models.CharField(max_length=500)
sub_category_of = models.ForeignKey('self', null=True, blank=True)
    db_insert_date = models.DateTimeField('date created')
    db_modify_date = models.DateTimeField('date modified')
db_modify_user = models.ForeignKey(User)
active = models.BooleanField(default=1)
def __unicode__(self):
return self.category_name
class BookAttributeType(models.Model):
type_name = models.CharField(max_length=50)
    db_insert_date = models.DateTimeField('date created')
    db_modify_date = models.DateTimeField('date modified')
db_modify_user = models.ForeignKey(User)
active = models.BooleanField(default=1)
def __unicode__(self):
return self.type_name
class BookAttribute(models.Model):
attribute_name = models.CharField(max_length=50)
attribute_type = models.ForeignKey(BookAttributeType)
attribute_description = models.CharField(max_length=500)
    db_insert_date = models.DateTimeField('date created')
    db_modify_date = models.DateTimeField('date modified')
db_modify_user = models.ForeignKey(User)
active = models.BooleanField(default=1)
def __unicode__(self):
return self.attribute_name
class BookItem(models.Model):
book_uuid = models.CharField(max_length=40)
book_name = models.CharField(max_length=200)
book_author = models.CharField(max_length=100)
book_description = models.CharField(null=True, blank=True, max_length=1000)
book_category = models.ForeignKey(BookCategory)
book_cover = models.ForeignKey(BookAttribute, related_name='book_cover')
book_quality = models.ForeignKey(BookAttribute, related_name='book_quality')
book_language = models.ForeignKey(BookAttribute, related_name='book_language')
book_price = models.DecimalField(max_digits=10, decimal_places=2)
book_discount = models.DecimalField(null=True, blank=True, max_digits=10, decimal_places=2)
is_notable = models.BooleanField(default=0)
book_images_path = models.CharField(null=True, blank=True, max_length=200)
book_images_names = models.CharField(null=True, blank=True, max_length=1000)
book_thumbnail = models.CharField(null=True, blank=True, max_length=30)
view_counter = models.BigIntegerField(null=True, default=0)
search_hits_counter = models.BigIntegerField(null=True, default=0)
    db_insert_date = models.DateTimeField('date created')
    db_modify_date = models.DateTimeField('date modified')
db_modify_user = models.ForeignKey(User)
active = models.BooleanField(default=1)
def __unicode__(self):
return self.book_name
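# Minimal ORM usage sketch (hedged illustration; assumes migrations applied):
#
#     notable = BookItem.objects.filter(active=True, is_notable=True)
#     cheap_first = notable.filter(book_price__lte=10).order_by('book_price')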
| andriyboychenko/books-online | catalogue/models.py | Python | apache-2.0 | 3,350 |
# -*- coding: utf-8 -*-
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from google.oauth2 import id_token
from google.auth.transport import requests
from flask import session
from framework import basehandlers
# from framework import permissions
# from framework import ramcache
# from internals import models
class LogoutAPI(basehandlers.APIHandler):
"""Create a session using the id_token generated by Google Sign-In."""
def do_post(self):
session.clear()
return {'message': 'Done'}
| GoogleChrome/chromium-dashboard | api/logout_api.py | Python | apache-2.0 | 1,049 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import functools
import mock
import unittest
from xml.dom import minidom
from airflow.www import app as application
from airflow.www import utils
class UtilsTest(unittest.TestCase):
def setUp(self):
super(UtilsTest, self).setUp()
def test_empty_variable_should_not_be_hidden(self):
self.assertFalse(utils.should_hide_value_for_key(""))
self.assertFalse(utils.should_hide_value_for_key(None))
def test_normal_variable_should_not_be_hidden(self):
self.assertFalse(utils.should_hide_value_for_key("key"))
def test_sensitive_variable_should_be_hidden(self):
self.assertTrue(utils.should_hide_value_for_key("google_api_key"))
def test_sensitive_variable_should_be_hidden_ic(self):
self.assertTrue(utils.should_hide_value_for_key("GOOGLE_API_KEY"))
def check_generate_pages_html(self, current_page, total_pages,
window=7, check_middle=False):
extra_links = 4 # first, prev, next, last
html_str = utils.generate_pages(current_page, total_pages)
# dom parser has issues with special « and »
html_str = html_str.replace('«', '')
html_str = html_str.replace('»', '')
dom = minidom.parseString(html_str)
self.assertIsNotNone(dom)
ulist = dom.getElementsByTagName('ul')[0]
ulist_items = ulist.getElementsByTagName('li')
self.assertEqual(min(window, total_pages) + extra_links, len(ulist_items))
def get_text(nodelist):
rc = []
for node in nodelist:
if node.nodeType == node.TEXT_NODE:
rc.append(node.data)
return ''.join(rc)
page_items = ulist_items[2:-2]
mid = int(len(page_items) / 2)
for i, item in enumerate(page_items):
a_node = item.getElementsByTagName('a')[0]
href_link = a_node.getAttribute('href')
node_text = get_text(a_node.childNodes)
if node_text == str(current_page + 1):
if check_middle:
self.assertEqual(mid, i)
self.assertEqual('javascript:void(0)', a_node.getAttribute('href'))
self.assertIn('active', item.getAttribute('class'))
else:
link_str = '?page=' + str(int(node_text) - 1)
self.assertEqual(link_str, href_link)
def test_generate_pager_current_start(self):
self.check_generate_pages_html(current_page=0,
total_pages=6)
def test_generate_pager_current_middle(self):
self.check_generate_pages_html(current_page=10,
total_pages=20,
check_middle=True)
def test_generate_pager_current_end(self):
self.check_generate_pages_html(current_page=38,
total_pages=39)
def test_params_no_values(self):
"""Should return an empty string if no params are passed"""
self.assertEquals('', utils.get_params())
def test_params_search(self):
self.assertEqual('search=bash_',
utils.get_params(search='bash_'))
def test_params_showPaused_true(self):
"""Should detect True as default for showPaused"""
self.assertEqual('',
utils.get_params(showPaused=True))
def test_params_showPaused_false(self):
self.assertEqual('showPaused=False',
utils.get_params(showPaused=False))
def test_params_all(self):
"""Should return params string ordered by param key"""
self.assertEqual('page=3&search=bash_&showPaused=False',
utils.get_params(showPaused=False, page=3, search='bash_'))
# flask_login is loaded by calling flask_login._get_user.
@mock.patch("flask_login._get_user")
@mock.patch("airflow.settings.Session")
def test_action_logging_with_login_user(self, mocked_session, mocked_get_user):
fake_username = 'someone'
mocked_current_user = mock.MagicMock()
mocked_get_user.return_value = mocked_current_user
mocked_current_user.user.username = fake_username
mocked_session_instance = mock.MagicMock()
mocked_session.return_value = mocked_session_instance
app = application.create_app(testing=True)
        # Patching here to avoid errors in application.create_app
with mock.patch("airflow.models.Log") as mocked_log:
with app.test_request_context():
@utils.action_logging
def some_func():
pass
some_func()
mocked_log.assert_called_once()
(args, kwargs) = mocked_log.call_args_list[0]
self.assertEqual('some_func', kwargs['event'])
self.assertEqual(fake_username, kwargs['owner'])
mocked_session_instance.add.assert_called_once()
@mock.patch("flask_login._get_user")
@mock.patch("airflow.settings.Session")
def test_action_logging_with_invalid_user(self, mocked_session, mocked_get_user):
anonymous_username = 'anonymous'
# When the user returned by flask login_manager._load_user
# is invalid.
mocked_current_user = mock.MagicMock()
mocked_get_user.return_value = mocked_current_user
mocked_current_user.user = None
mocked_session_instance = mock.MagicMock()
mocked_session.return_value = mocked_session_instance
app = application.create_app(testing=True)
        # Patching here to avoid errors in application.create_app
with mock.patch("airflow.models.Log") as mocked_log:
with app.test_request_context():
@utils.action_logging
def some_func():
pass
some_func()
mocked_log.assert_called_once()
(args, kwargs) = mocked_log.call_args_list[0]
self.assertEqual('some_func', kwargs['event'])
self.assertEqual(anonymous_username, kwargs['owner'])
mocked_session_instance.add.assert_called_once()
# flask_login.current_user would be AnonymousUserMixin
# when there's no user_id in the flask session.
@mock.patch("airflow.settings.Session")
def test_action_logging_with_anonymous_user(self, mocked_session):
anonymous_username = 'anonymous'
mocked_session_instance = mock.MagicMock()
mocked_session.return_value = mocked_session_instance
app = application.create_app(testing=True)
        # Patching here to avoid errors in application.create_app
with mock.patch("airflow.models.Log") as mocked_log:
with app.test_request_context():
@utils.action_logging
def some_func():
pass
some_func()
mocked_log.assert_called_once()
(args, kwargs) = mocked_log.call_args_list[0]
self.assertEqual('some_func', kwargs['event'])
self.assertEqual(anonymous_username, kwargs['owner'])
mocked_session_instance.add.assert_called_once()
def test_get_python_source_from_method(self):
class AMockClass(object):
def a_method(self):
""" A method """
pass
mocked_class = AMockClass()
result = utils.get_python_source(mocked_class.a_method)
self.assertIn('A method', result)
def test_get_python_source_from_class(self):
class AMockClass(object):
def __call__(self):
""" A __call__ method """
pass
mocked_class = AMockClass()
result = utils.get_python_source(mocked_class)
self.assertIn('A __call__ method', result)
def test_get_python_source_from_partial_func(self):
def a_function(arg_x, arg_y):
""" A function with two args """
pass
partial_function = functools.partial(a_function, arg_x=1)
result = utils.get_python_source(partial_function)
self.assertIn('A function with two args', result)
def test_get_python_source_from_none(self):
result = utils.get_python_source(None)
self.assertIn('No source code available', result)
if __name__ == '__main__':
unittest.main()
| edgarRd/incubator-airflow | tests/www/test_utils.py | Python | apache-2.0 | 9,275 |
from vesper.tests.test_case import TestCase
import vesper.util.yaml_utils as yaml_utils
class YamlUtilsTests(TestCase):
def test_dump_and_load(self):
x = {'x': 1, 'y': [1, 2, 3], 'z': {'one': 1}}
s = yaml_utils.dump(x)
y = yaml_utils.load(s)
self.assertEqual(x, y)
def test_dump_and_load_with_non_default_flow_style(self):
x = {'x': 1, 'y': [1, 2, 3], 'z': {'one': 1}}
s = yaml_utils.dump(x, default_flow_style=False)
y = yaml_utils.load(s)
self.assertEqual(x, y)
def test_sexagesimal_load(self):
"""
The PyYAML `load` function parses YAML 1.1, in which "12:34:56"
is the sexagesimal number 12 * 3600 + 34 * 60 + 56 = 45296. We
        use `ruamel.yaml` rather than PyYAML because it can also parse
YAML 1.2, in which "12:34:56" is simply the string "12:34:56".
This test checks that `yaml_utils.load` parses its input as we
would like.
"""
x = yaml_utils.load('12:34:56')
self.assertEqual(x, '12:34:56')
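    # For contrast, a hedged illustration of the YAML 1.1 behaviour described
    # above (requires PyYAML, which is not a dependency of this test):
    #
    #     import yaml
    #     yaml.safe_load('12:34:56')  # -> 45296, i.e. 12*3600 + 34*60 + 56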
# def test_numpy_scalar_dump(self):
#
# """
# This test shows that you can't dump a NumPy scalar, since the
# dumper doesn't know how to represent its type. Perhaps we could
# tell it how to, or perhaps that would be more trouble than it's
# worth.
# """
#
# import numpy as np
# x = np.arange(3)
# s = yaml_utils.dump(x[1])
# self.assertEqual(s, '1')
| HaroldMills/Vesper | vesper/util/tests/test_yaml_utils.py | Python | mit | 1,582 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# seriesly - XBMC Plugin
# Conector para safelinking (ocultador de url)
# http://blog.tvalacarta.info/plugin-xbmc/seriesly/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_long_url( short_url ):
logger.info("[safelinking.py] get_long_url(short_url='%s')" % short_url)
location = scrapertools.get_header_from_response(short_url,header_to_get="location")
logger.info("location="+location)
return location
def test():
location = get_long_url("https://safelinking.net/d/b038a2ed6e")
ok = ("http://played.to" in location)
return ok | conejoninja/xbmc-seriesly | servers/safelinking.py | Python | gpl-3.0 | 800 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <[email protected]>
# Copyright 2007 Kenneth Loafman <[email protected]>
# Copyright 2008 Ian Barton <[email protected]>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import imaplib
import re
import os
import time
import socket
import StringIO
import rfc822
import getpass
import email
import duplicity.backend
from duplicity import globals
from duplicity import log
from duplicity.errors import * #@UnusedWildImport
class ImapBackend(duplicity.backend.Backend):
def __init__(self, parsed_url):
duplicity.backend.Backend.__init__(self, parsed_url)
log.Debug("I'm %s (scheme %s) connecting to %s as %s" %
(self.__class__.__name__, parsed_url.scheme, parsed_url.hostname, parsed_url.username))
# Store url for reconnection on error
self._url = parsed_url
# Set the username
if ( parsed_url.username is None ):
username = raw_input('Enter account userid: ')
else:
username = parsed_url.username
# Set the password
if ( not parsed_url.password ):
if os.environ.has_key('IMAP_PASSWORD'):
password = os.environ.get('IMAP_PASSWORD')
else:
password = getpass.getpass("Enter account password: ")
else:
password = parsed_url.password
self._username = username
self._password = password
self._resetConnection()
def _resetConnection(self):
parsed_url = self._url
try:
imap_server = os.environ['IMAP_SERVER']
except KeyError:
imap_server = parsed_url.hostname
# Try to close the connection cleanly
try:
self._conn.close()
except Exception:
pass
if (parsed_url.scheme == "imap"):
cl = imaplib.IMAP4
self._conn = cl(imap_server, 143)
elif (parsed_url.scheme == "imaps"):
cl = imaplib.IMAP4_SSL
self._conn = cl(imap_server, 993)
log.Debug("Type of imap class: %s" % (cl.__name__))
self.remote_dir = re.sub(r'^/', r'', parsed_url.path, 1)
# Login
if (not(globals.imap_full_address)):
self._conn.login(self._username, self._password)
self._conn.select(globals.imap_mailbox)
log.Info("IMAP connected")
else:
self._conn.login(self._username + "@" + parsed_url.hostname, self._password)
self._conn.select(globals.imap_mailbox)
log.Info("IMAP connected")
def _prepareBody(self,f,rname):
mp = email.MIMEMultipart.MIMEMultipart()
# I am going to use the remote_dir as the From address so that
# multiple archives can be stored in an IMAP account and can be
# accessed separately
mp["From"]=self.remote_dir
mp["Subject"]=rname
a = email.MIMEBase.MIMEBase("application","binary")
a.set_payload(f.read())
email.Encoders.encode_base64(a)
mp.attach(a)
return mp.as_string()
def put(self, source_path, remote_filename = None):
if not remote_filename:
remote_filename = source_path.get_filename()
f=source_path.open("rb")
allowedTimeout = globals.timeout
if (allowedTimeout == 0):
# Allow a total timeout of 1 day
allowedTimeout = 2880
while allowedTimeout > 0:
try:
self._conn.select(remote_filename)
body=self._prepareBody(f,remote_filename)
# If we don't select the IMAP folder before
# append, the message goes into the INBOX.
self._conn.select(globals.imap_mailbox)
self._conn.append(globals.imap_mailbox, None, None, body)
break
except (imaplib.IMAP4.abort, socket.error, socket.sslerror):
allowedTimeout -= 1
log.Info("Error saving '%s', retrying in 30s " % remote_filename)
time.sleep(30)
while allowedTimeout > 0:
try:
self._resetConnection()
break
except (imaplib.IMAP4.abort, socket.error, socket.sslerror):
allowedTimeout -= 1
log.Info("Error reconnecting, retrying in 30s ")
time.sleep(30)
log.Info("IMAP mail with '%s' subject stored" % remote_filename)
def get(self, remote_filename, local_path):
allowedTimeout = globals.timeout
if (allowedTimeout == 0):
# Allow a total timeout of 1 day
allowedTimeout = 2880
while allowedTimeout > 0:
try:
self._conn.select(globals.imap_mailbox)
(result,list) = self._conn.search(None, 'Subject', remote_filename)
if result != "OK":
raise Exception(list[0])
                # check if there is any result
                if list[0] == '':
                    raise Exception("no mail with subject '%s'" % remote_filename)
(result,list) = self._conn.fetch(list[0],"(RFC822)")
if result != "OK":
raise Exception(list[0])
rawbody=list[0][1]
p = email.Parser.Parser()
m = p.parsestr(rawbody)
mp = m.get_payload(0)
body = mp.get_payload(decode=True)
break
except (imaplib.IMAP4.abort, socket.error, socket.sslerror):
allowedTimeout -= 1
log.Info("Error loading '%s', retrying in 30s " % remote_filename)
time.sleep(30)
while allowedTimeout > 0:
try:
self._resetConnection()
break
except (imaplib.IMAP4.abort, socket.error, socket.sslerror):
allowedTimeout -= 1
log.Info("Error reconnecting, retrying in 30s ")
time.sleep(30)
tfile = local_path.open("wb")
tfile.write(body)
local_path.setdata()
log.Info("IMAP mail with '%s' subject fetched" % remote_filename)
def _list(self):
ret = []
(result,list) = self._conn.select(globals.imap_mailbox)
if result != "OK":
raise BackendException(list[0])
# Going to find all the archives which have remote_dir in the From
# address
# Search returns an error if you haven't selected an IMAP folder.
(result,list) = self._conn.search(None, 'FROM', self.remote_dir)
if result!="OK":
raise Exception(list[0])
if list[0]=='':
return ret
nums=list[0].split(" ")
set="%s:%s"%(nums[0],nums[-1])
(result,list) = self._conn.fetch(set,"(BODY[HEADER])")
if result!="OK":
raise Exception(list[0])
for msg in list:
if (len(msg)==1):continue
io = StringIO.StringIO(msg[1])
m = rfc822.Message(io)
subj = m.getheader("subject")
header_from = m.getheader("from")
# Catch messages with empty headers which cause an exception.
if (not (header_from == None)):
if (re.compile("^" + self.remote_dir + "$").match(header_from)):
ret.append(subj)
log.Info("IMAP LIST: %s %s" % (subj,header_from))
return ret
def _imapf(self,fun,*args):
(ret,list)=fun(*args)
if ret != "OK":
raise Exception(list[0])
return list
def _delete_single_mail(self,i):
self._imapf(self._conn.store,i,"+FLAGS",'\\DELETED')
def _expunge(self):
list=self._imapf(self._conn.expunge)
def delete(self, filename_list):
assert len(filename_list) > 0
for filename in filename_list:
list = self._imapf(self._conn.search,None,"(SUBJECT %s)"%filename)
list = list[0].split()
            if len(list) == 0 or list[0] == "":
                raise Exception("no such mail with subject '%s'" % filename)
self._delete_single_mail(list[0])
log.Notice("marked %s to be deleted" % filename)
self._expunge()
log.Notice("IMAP expunged %s files" % len(list))
def close(self):
self._conn.select(globals.imap_mailbox)
self._conn.close()
self._conn.logout()
duplicity.backend.register_backend("imap", ImapBackend);
duplicity.backend.register_backend("imaps", ImapBackend);
| yasoob/PythonRSSReader | venv/lib/python2.7/dist-packages/duplicity/backends/imapbackend.py | Python | mit | 9,393 |
### Digitised data from [Firek1995]
import numpy as np
# Steady State Inactivation
def Inact_Firek():
"""
Steady-State inactivation curve [Firek1995]
cf Fig 3c
"""
x = [-50, -35, -25, -15, -5, 5, 15]
y = np.asarray([0.9478260869565218,
0.9356521739130435,
0.8852173913043478,
0.6869565217391305,
0.6278260869565218,
0.5478260869565218,
0.5252173913043479])
ylower = np.asarray([0.8973913043478261,
0.9130434782608696,
0.8400000000000001,
0.6452173913043479,
0.5669565217391304,
0.5130434782608696,
0.5026086956521739])
sem = np.abs(y-ylower)
N = 6
sd = np.sqrt(N)*sem
return x, y.tolist(), sd.tolist()
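# Hedged plotting sketch (matplotlib assumed; the axis labels are an
# interpretation of the digitised data, not part of [Firek1995]):
#
#     import matplotlib.pyplot as plt
#     x, y, sd = Inact_Firek()
#     plt.errorbar(x, y, yerr=sd, fmt='o')
#     plt.xlabel('Voltage (mV)')
#     plt.ylabel('Normalised current')
#     plt.show()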
| c22n/ion-channel-ABC | docs/examples/human-atrial/data/isus/Firek1995/data_Firek1995.py | Python | gpl-3.0 | 912 |
from ...utilities import COLUMN, ROW, index_1d, inverted, is_on_board_2d
from ..direction import Direction
from ..tessellation_base import TessellationBase
class HexobanTessellation(TessellationBase):
_LEGAL_DIRECTIONS = (
Direction.LEFT,
Direction.RIGHT,
Direction.NORTH_EAST,
Direction.NORTH_WEST,
Direction.SOUTH_EAST,
Direction.SOUTH_WEST,
)
_CHR_TO_ATOMIC_MOVE = None
_ATOMIC_MOVE_TO_CHR = None
@property
def legal_directions(self):
return self._LEGAL_DIRECTIONS
@property
def graph_type(self):
from ...graph import GraphType
return GraphType.DIRECTED
def neighbor_position(self, position, direction, board_width, board_height):
# if not is_on_board_1d(position, board_width, board_height):
# return None
row = ROW(position, board_width)
column = COLUMN(position, board_width)
if direction == Direction.LEFT:
column -= 1
elif direction == Direction.RIGHT:
column += 1
elif direction == Direction.NORTH_EAST:
column += row % 2
row -= 1
elif direction == Direction.NORTH_WEST:
column -= (row + 1) % 2
row -= 1
elif direction == Direction.SOUTH_EAST:
column += row % 2
row += 1
elif direction == Direction.SOUTH_WEST:
column -= (row + 1) % 2
row += 1
else:
raise ValueError(direction)
if is_on_board_2d(column, row, board_width, board_height):
return index_1d(column, row, board_width)
return None
@property
def _char_to_atomic_move_dict(self):
if not self.__class__._CHR_TO_ATOMIC_MOVE:
from ...snapshot import AtomicMove
self.__class__._CHR_TO_ATOMIC_MOVE = {
AtomicMove.l: (Direction.LEFT, False),
AtomicMove.L: (Direction.LEFT, True),
AtomicMove.r: (Direction.RIGHT, False),
AtomicMove.R: (Direction.RIGHT, True),
AtomicMove.u: (Direction.NORTH_WEST, False),
AtomicMove.U: (Direction.NORTH_WEST, True),
AtomicMove.d: (Direction.SOUTH_EAST, False),
AtomicMove.D: (Direction.SOUTH_EAST, True),
AtomicMove.n: (Direction.NORTH_EAST, False),
AtomicMove.N: (Direction.NORTH_EAST, True),
AtomicMove.s: (Direction.SOUTH_WEST, False),
AtomicMove.S: (Direction.SOUTH_WEST, True),
}
return self._CHR_TO_ATOMIC_MOVE
@property
def _atomic_move_to_char_dict(self):
if not self.__class__._ATOMIC_MOVE_TO_CHR:
self.__class__._ATOMIC_MOVE_TO_CHR = inverted(
self._char_to_atomic_move_dict
)
return self._ATOMIC_MOVE_TO_CHR
def __str__(self):
return "hexoban"
| tadams42/sokoenginepy | src/sokoenginepy/tessellation/hexoban_tessellation/hexoban_tessellation.py | Python | gpl-3.0 | 2,950 |
import optparse
parser = optparse.OptionParser()
parser.add_option('--pdb_in', default=None, help='Input PDB file')
parser.add_option('--dms_out', default=None, help='Output dms file')
(options, extra_args) = parser.parse_args()  # optparse returns (options, positional args)
import os
if not os.path.exists(options.pdb_in):
    raise Exception(options.pdb_in+' does not exist')
print 'Processing '+options.pdb_in
import chimera
chimera.openModels.open(args.pdb_in)
# Molecular surface
from chimera import runCommand, openModels, MSMSModel
chimera.runCommand("surf") # generate surface using 'surf' command
surf = openModels.list(modelTypes=[MSMSModel])[0] # get the surf object
from WriteDMS import writeDMS
writeDMS(surf, options.dms_out) # write DMS
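# Hedged invocation sketch (paths are placeholders): Chimera scripts like this
# one are typically run headless, e.g.
#
#     chimera --nogui --script "_receptor_surface.chimera.py \
#         --pdb_in receptor.pdb --dms_out receptor.dms"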
| gkumar7/AlGDock | Pipeline/_receptor_surface.chimera.py | Python | mit | 690 |
from flask import Flask
from flask import render_template
from flask import request  # needed by sign_up() below
from random import randint
app = Flask('myApp')
@app.route('/')
def hello():
return '<style> h1 { color: red; text-align: center; } </style> <h1>Hello World</h1>'
# app.run()
# '@app.route()' = a decorator
# a decorator (applied to hello() above) wraps a function -- it is not itself a function
@app.route('/<name>')
def hello_someone(name):
adjective = ['fantastic', 'awesome', 'amazing']
randAdj = randint(0, len(adjective)-1)
exaggerate = ['pretty', 'freaking', 'totally']
randEx = randint(0, len(exaggerate)-1)
return '<h1>Hello {0}, you are {1} {2}!</h1>'.format(name.title(), exaggerate[randEx], adjective[randAdj])
### use 'render_template' (imported on line 2)
# @app.route('/<name>')
# def hi_there(name):
# return render_template('hello.html', name=name.title())
### Look into further >> need to set up a templates/hello.html file for render_template to find
# Set up Python for 'contact.html'
# URL extension & sign up function
@app.route('/signup', methods=['POST'])
def sign_up():
form_data = request.form
print form_data['name']
print form_data['email']
return 'All OK'
### Need to link the 'contact.html' file
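# Hedged sketch of serving the form page itself (assumes a templates/contact.html
# file exists; illustration only, so left commented out):
# @app.route('/contact')
# def contact():
#     return render_template('contact.html')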
# insert 'debug=True' within 'app.run()' to avoid having to stop/rerunning the Python through the terminal
app.run(
debug=True
)
# {% ... %} is used within the examples in the session notes pdf >> Jinja (a templating language for Python, similar to Liquid, Django templates, etc.)
| madeleinel/Cheat-Sheets | advCFG/Session3/flaskApp/FlaskExs.py | Python | mit | 1,434 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Test for the piezo tensor class
"""
__author__ = "Shyam Dwaraknath"
__version__ = "0.1"
__maintainer__ = "Shyam Dwaraknath"
__email__ = "[email protected]"
__status__ = "Development"
__date__ = "4/1/16"
import os
import unittest
import numpy as np
from pymatgen.analysis.piezo import PiezoTensor
from pymatgen.util.testing import PymatgenTest
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
class PiezoTest(PymatgenTest):
def setUp(self):
self.piezo_struc = self.get_structure('BaNiO3')
self.voigt_matrix = np.array([[0., 0., 0., 0., 0.03839, 0.],
[0., 0., 0., 0.03839, 0., 0.],
[6.89822, 6.89822, 27.46280, 0., 0., 0.]])
self.vasp_matrix = np.array([[0., 0., 0., 0., 0., 0.03839],
[0., 0., 0., 0., 0.03839, 0., 0.],
[6.89822, 6.89822, 27.46280, 0., 0., 0.]])
self.full_tensor_array = [[[0., 0., 0.03839],
[0., 0., 0.],
[0.03839, 0., 0.]],
[[0., 0., 0.],
[0., 0., 0.03839],
[0., 0.03839, 0.]],
[[6.89822, 0., 0.],
[0., 6.89822, 0.],
[0., 0., 27.4628]]]
def test_new(self):
pt = PiezoTensor(self.full_tensor_array)
self.assertArrayAlmostEqual(pt, self.full_tensor_array)
bad_dim_array = np.zeros((3, 3))
self.assertRaises(ValueError, PiezoTensor, bad_dim_array)
def test_from_voigt(self):
bad_voigt = np.zeros((3, 7))
pt = PiezoTensor.from_voigt(self.voigt_matrix)
self.assertArrayEqual(pt, self.full_tensor_array)
self.assertRaises(ValueError, PiezoTensor.from_voigt, bad_voigt)
self.assertArrayEqual(self.voigt_matrix, pt.voigt)
def test_from_vasp_voigt(self):
bad_voigt = np.zeros((3, 7))
pt = PiezoTensor.from_vasp_voigt(self.vasp_matrix)
self.assertArrayEqual(pt, self.full_tensor_array)
self.assertRaises(ValueError, PiezoTensor.from_voigt, bad_voigt)
self.assertArrayEqual(self.voigt_matrix, pt.voigt)
if __name__ == '__main__':
unittest.main()
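# Hedged note on the Voigt convention exercised above: the 3x6 matrix columns
# map to tensor index pairs (xx, yy, zz, yz, xz, xy), so for example
#
#     pt = PiezoTensor.from_voigt(voigt)   # pt[0][0][2] == voigt[0][4]
#     pt[2][2][2]                          # == voigt[2][2] == 27.4628 (zzz)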
| dongsenfo/pymatgen | pymatgen/analysis/tests/test_piezo.py | Python | mit | 2,535 |
# Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# hg.py - repository classes for mercurial
#
# Copyright 2005-2007 Matt Mackall <[email protected]>
# Copyright 2006 Vadim Gelfer <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import errno
import hashlib
import os
import shutil
import weakref
from . import (
bookmarks,
bundlerepo,
clone as clonemod,
cmdutil,
destutil,
discovery,
edenfs,
error,
exchange,
extensions,
localrepo,
lock,
merge as mergemod,
mononokepeer,
node,
perftrace,
phases,
progress,
pycompat,
scmutil,
sshpeer,
eagerpeer,
ui as uimod,
uiconfig,
url,
util,
vfs as vfsmod,
visibility,
)
from .i18n import _
from .node import nullid
from .pycompat import encodeutf8
release = lock.release
# shared features
sharedbookmarks = "bookmarks"
def _local(path):
path = util.expandpath(util.urllocalpath(path))
return os.path.isfile(path) and bundlerepo or localrepo
def addbranchrevs(lrepo, other, branches, revs):
peer = other.peer() # a courtesy to callers using a localrepo for other
hashbranch, branches = branches
if not hashbranch and not branches:
x = revs or None
if util.safehasattr(revs, "first"):
y = revs.first()
elif revs:
y = revs[0]
else:
y = None
return x, y
if revs:
revs = list(revs)
else:
revs = []
if not peer.capable("branchmap"):
if branches:
raise error.Abort(_("remote branch lookup not supported"))
revs.append(hashbranch)
return revs, revs[0]
branchmap = peer.branchmap()
def primary(branch):
if branch == ".":
if not lrepo:
raise error.Abort(_("dirstate branch not accessible"))
branch = lrepo.dirstate.branch()
if branch in branchmap:
revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
return True
else:
return False
for branch in branches:
if not primary(branch):
raise error.RepoLookupError(_("unknown branch '%s'") % branch)
if hashbranch:
if not primary(hashbranch):
revs.append(hashbranch)
return revs, revs[0]
def parseurl(path, branches=None):
"""parse url#branch, returning (url, (branch, branches))"""
u = util.url(path)
branch = None
if u.fragment:
branch = u.fragment
u.fragment = None
return str(u), (branch, branches or [])
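# e.g. (hedged illustration):
#     parseurl('https://example.com/repo#stable')
#     # -> ('https://example.com/repo', ('stable', []))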
schemes = {
"bundle": bundlerepo,
"eager": eagerpeer,
"file": _local,
"mononoke": mononokepeer,
"ssh": sshpeer,
"test": eagerpeer,
}
def _peerlookup(path):
u = util.url(path)
scheme = u.scheme or "file"
thing = schemes.get(scheme) or schemes["file"]
try:
return thing(path)
except TypeError:
# we can't test callable(thing) because 'thing' can be an unloaded
# module that implements __call__
if not util.safehasattr(thing, "instance"):
raise
return thing
def islocal(repo):
"""return true if repo (or path pointing to repo) is local"""
if isinstance(repo, str):
try:
return _peerlookup(repo).islocal(repo)
except AttributeError:
return False
return repo.local()
def openpath(ui, path):
"""open path with open if local, url.open if remote"""
pathurl = util.url(path, parsequery=False, parsefragment=False)
if pathurl.islocal():
return util.posixfile(pathurl.localpath(), "rb")
else:
return url.open(ui, path)
# a list of (ui, repo) functions called for wire peer initialization
wirepeersetupfuncs = []
def _setuprepo(ui, repo, presetupfuncs=None):
ui = getattr(repo, "ui", ui)
for f in presetupfuncs or []:
f(ui, repo)
for name, module in extensions.extensions(ui):
hook = getattr(module, "reposetup", None)
if hook:
hook(ui, repo)
if not repo.local():
perftrace.traceflag("remote")
for f in wirepeersetupfuncs:
f(ui, repo)
else:
perftrace.traceflag("local")
@perftrace.tracefunc("Repo Setup")
def repository(ui, path="", create=False, presetupfuncs=None):
"""return a repository object for the specified path"""
u = util.url(path)
if u.scheme == "bundle":
creator = bundlerepo
else:
creator = _local(path)
    obj = creator.instance(ui, path, create)
    _setuprepo(ui, obj, presetupfuncs=presetupfuncs)
    repo = obj.local()
    if not repo:
        raise error.Abort(_("repository '%s' is not local") % (path or obj.url()))
return repo
@perftrace.tracefunc("Peer Setup")
def peer(uiorrepo, opts, path, create=False):
"""return a repository peer for the specified path"""
rui = remoteui(uiorrepo, opts)
obj = _peerlookup(path).instance(rui, path, create)
# Uncomment this once we stop using file clones in sandcastle
# if obj.local() and not rui.configbool("experimental", "allowfilepeer"):
# raise error.Abort(_("cannot create peer for local file repository '%s'")
# % path)
_setuprepo(rui, obj)
return obj.peer()
def defaultdest(source):
"""return default destination of clone if none is given
>>> defaultdest('foo')
'foo'
>>> defaultdest('/foo/bar')
'bar'
>>> defaultdest('/')
''
>>> defaultdest('')
''
>>> defaultdest('http://example.org/')
''
>>> defaultdest('http://example.org/foo/')
'foo'
"""
path = util.url(source).path
if not path:
return ""
return os.path.basename(os.path.normpath(path))
def share(
ui, source, dest=None, update=True, bookmarks=True, defaultpath=None, relative=False
):
"""create a shared repository"""
if not islocal(source):
raise error.Abort(_("can only share local repositories"))
if not dest:
dest = defaultdest(source)
else:
dest = ui.expandpath(dest)
if isinstance(source, str):
origsource = ui.expandpath(source)
source, branches = parseurl(origsource)
srcrepo = repository(ui, source)
rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
else:
srcrepo = source.local()
origsource = source = srcrepo.url()
checkout = None
sharedpath = srcrepo.sharedpath # if our source is already sharing
destwvfs = vfsmod.vfs(dest, realpath=True)
destvfs = vfsmod.vfs(os.path.join(destwvfs.base, ".hg"), realpath=True)
if destvfs.lexists():
raise error.Abort(_("destination already exists"))
if not destwvfs.isdir():
destwvfs.mkdir()
destvfs.makedir()
requirements = srcrepo.requirements.copy()
if relative:
try:
sharedpath = os.path.relpath(sharedpath, destvfs.base)
requirements.add("relshared")
except (IOError, ValueError) as e:
# ValueError is raised on Windows if the drive letters differ on
# each path
raise error.Abort(_("cannot calculate relative path"), hint=str(e))
else:
requirements.add("shared")
scmutil.writerequires(destvfs, requirements)
destvfs.writeutf8("sharedpath", sharedpath)
r = repository(ui, destwvfs.base)
postshare(srcrepo, r, bookmarks=bookmarks, defaultpath=defaultpath)
_postshareupdate(r, update, checkout=checkout)
return r
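# Hedged call sketch (paths are placeholders):
#     share(ui, '/path/to/src', '/path/to/copy', relative=True)
# creates a lightweight checkout whose .hg/sharedpath points at the source
# repository's store, per the requirements written above.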
def unshare(ui, repo):
"""convert a shared repository to a normal one
Copy the store data to the repo and remove the sharedpath data.
"""
destlock = lock = None
lock = repo.lock()
try:
# we use locks here because if we race with commit, we
# can end up with extra data in the cloned revlogs that's
# not pointed to by changesets, thus causing verify to
# fail
destlock = copystore(ui, repo, repo.path)
sharefile = repo.localvfs.join("sharedpath")
util.rename(sharefile, sharefile + ".old")
repo.requirements.discard("shared")
repo.requirements.discard("relshared")
repo._writerequirements()
finally:
destlock and destlock.release()
lock and lock.release()
# update store, spath, svfs and sjoin of repo
# invalidate before rerunning __init__
repo.invalidate(clearfilecache=True)
repo.invalidatedirstate()
repo.__init__(repo.baseui, repo.root)
def postshare(sourcerepo, destrepo, bookmarks=True, defaultpath=None):
"""Called after a new shared repo is created.
The new repo only has a requirements file and pointer to the source.
This function configures additional shared data.
Extensions can wrap this function and write additional entries to
destrepo/.hg/shared to indicate additional pieces of data to be shared.
"""
default = defaultpath or sourcerepo.ui.config("paths", "default")
if default:
fp = destrepo.localvfs("hgrc", "w", text=True)
fp.write("[paths]\n")
fp.write("default = %s\n" % default)
fp.close()
with destrepo.wlock():
if bookmarks:
fp = destrepo.localvfs("shared", "wb")
fp.write(pycompat.encodeutf8(sharedbookmarks + "\n"))
fp.close()
def _postshareupdate(repo, update, checkout=None):
"""Maybe perform a working directory update after a shared repo is created.
``update`` can be a boolean or a revision to update to.
"""
if not update:
return
repo.ui.status(_("updating working directory\n"))
if update is not True:
checkout = update
for test in (checkout, "default", "tip"):
if test is None:
continue
try:
uprev = repo.lookup(test)
break
except error.RepoLookupError:
continue
_update(repo, uprev)
def copystore(ui, srcrepo, destpath):
"""copy files from store of srcrepo in destpath
returns destlock
"""
destlock = None
try:
with progress.bar(ui, _("linking")) as prog:
hardlink = False
num = 0
srcpublishing = srcrepo.publishing()
srcvfs = vfsmod.vfs(srcrepo.sharedpath)
dstvfs = vfsmod.vfs(destpath)
for f in srcrepo.store.copylist():
if srcpublishing and f.endswith("phaseroots"):
continue
dstbase = os.path.dirname(f)
if dstbase and not dstvfs.exists(dstbase):
dstvfs.mkdir(dstbase)
if srcvfs.exists(f):
if f.endswith("data"):
# 'dstbase' may be empty (e.g. revlog format 0)
lockfile = os.path.join(dstbase, "lock")
# lock to avoid premature writing to the target
destlock = lock.lock(dstvfs, lockfile, ui=ui)
hardlink, num = util.copyfiles(
srcvfs.join(f), dstvfs.join(f), hardlink, num, prog
)
if hardlink:
ui.debug("linked %d files\n" % num)
else:
ui.debug("copied %d files\n" % num)
return destlock
except: # re-raises
release(destlock)
raise
def clone(
ui,
peeropts,
source,
dest=None,
pull=False,
rev=None,
update=True,
stream=False,
shallow=False,
):
"""Make a copy of an existing repository.
Create a copy of an existing repository in a new directory. The
source and destination are URLs, as passed to the repository
function. Returns a pair of repository peers, the source and
newly created destination.
The location of the source is added to the new repository's
.hg/hgrc file, as the default to be used for future pulls and
pushes.
If an exception is raised, the partly cloned/updated destination
repository will be deleted.
Arguments:
source: repository object or URL
dest: URL of destination repository to create (defaults to base
name of source repository)
pull: always pull from source repository, even in local case or if the
server prefers streaming
stream: stream raw data uncompressed from repository (fast over
LAN, slow over WAN)
rev: revision to clone up to (implies pull=True)
update: update working directory after clone completes, if
destination is local repository (True means update to default rev,
anything else is treated as a revision)
"""
if dest is None:
dest = defaultdest(source)
if dest:
ui.status(_("destination directory: %s\n") % dest)
else:
dest = ui.expandpath(dest)
destpeer = None
dest = util.urllocalpath(dest)
if not dest:
raise error.Abort(_("empty destination path is not valid"))
destcreated = False
destvfs = vfsmod.vfs(dest, expandpath=True)
if destvfs.lexists():
if not destvfs.isdir():
raise error.Abort(_("destination '%s' already exists") % dest)
elif destvfs.listdir():
raise error.Abort(_("destination '%s' is not empty") % dest)
else:
destcreated = True
# Create the destination repo before we even open the connection to the
# source, so we can use any repo-specific configuration for the connection.
try:
# Note: This triggers hgrc.dynamic generation with empty repo hgrc.
destpeer = repository(ui, dest, create=True)
except OSError as inst:
if inst.errno == errno.EEXIST:
cleandir = None
raise error.Abort(_("destination '%s' already exists") % dest)
raise
destrepo = destpeer.local()
# Get the source url, so we can write it into the dest hgrc
if isinstance(source, str):
origsource = ui.expandpath(source)
else:
srcpeer = source.peer() # in case we were called with a localrepo
origsource = source = source.peer().url()
abspath = origsource
if islocal(origsource):
abspath = os.path.abspath(util.urllocalpath(origsource))
if destrepo:
_writehgrc(destrepo, abspath, ui.configlist("_configs", "configfiles"))
# Reload hgrc to pick up `%include` configs. We don't need to
# regenerate dynamicconfig here, unless the hgrc contains reponame or
# username overrides (unlikely).
destrepo.ui.reloadconfigs(destrepo.root)
if shallow:
from edenscm.hgext.remotefilelog.shallowrepo import requirement
if requirement not in destrepo.requirements:
with destrepo.lock():
destrepo.requirements.add(requirement)
destrepo._writerequirements()
# Reopen the repo so reposetup in extensions can see the added
# requirement.
# To keep command line config overrides, reuse the ui from the
# old repo object. A cleaner way might be figuring out the
# overrides and then set them, in case extensions changes the
# class of the ui object.
origui = destrepo.ui
destrepo = repository(ui, dest)
destrepo.ui = origui
# Construct the srcpeer after the destpeer, so we can use the destrepo.ui
# configs.
try:
if isinstance(source, str):
source, mayberevs = parseurl(origsource)
if len(mayberevs) == 1:
rev = rev or mayberevs[0]
srcpeer = peer(destrepo.ui if destrepo else ui, peeropts, source)
except Exception:
if destcreated:
# Clean up the entire repo directory we made.
shutil.rmtree(dest, True)
else:
# Clean up just the .hg directory we made.
shutil.rmtree(os.path.join(dest, ".hg"), True)
raise
branch = (None, [])
rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
source = util.urllocalpath(source)
srclock = destlock = destlockw = cleandir = None
srcrepo = srcpeer.local()
try:
if islocal(dest):
cleandir = dest
copy = False
if (
srcrepo
and srcrepo.cancopy()
and islocal(dest)
and not phases.hassecret(srcrepo)
):
copy = not pull and not rev
if copy:
try:
# we use a lock here because if we race with commit, we
# can end up with extra data in the cloned revlogs that's
# not pointed to by changesets, thus causing verify to
# fail
srclock = srcrepo.lock(wait=False)
except error.LockError:
copy = False
if copy:
clonecodepath = "copy"
srcrepo.hook("preoutgoing", throw=True, source="clone")
hgdir = os.path.realpath(os.path.join(dest, ".hg"))
if not os.path.exists(dest):
os.mkdir(dest)
else:
# only clean up directories we create ourselves
cleandir = hgdir
            destpath = hgdir
# Drop the existing destrepo so Windows releases the files.
# Manually gc to ensure the objects are dropped.
destpeer = destrepo = None
import gc
gc.collect()
destlock = copystore(ui, srcrepo, destpath)
# repo initialization might also take a lock. Keeping destlock
# outside the repo object can cause deadlock. To avoid deadlock,
# we just release destlock here. The lock will be re-acquired
# soon by `destpeer`, or `local.lock()` below.
if destlock is not None:
destlock.release()
# copy bookmarks over
srcbookmarks = srcrepo.svfs.join("bookmarks")
dstbookmarks = os.path.join(destpath, "store", "bookmarks")
if os.path.exists(srcbookmarks):
util.copyfile(srcbookmarks, dstbookmarks)
# we need to re-init the repo after manually copying the data
# into it
destpeer = peer(srcrepo, peeropts, dest)
destrepo = destpeer.local()
srcrepo.hook("outgoing", source="clone", node=node.hex(node.nullid))
else:
clonecodepath = "legacy-pull"
revs = None
if rev:
if not srcpeer.capable("lookup"):
raise error.Abort(
_(
"src repository does not support "
"revision lookup and so doesn't "
"support clone by revision"
)
)
revs = [srcpeer.lookup(r) for r in rev]
checkout = revs[0]
# Can we use EdenAPI CloneData provided by a separate EdenAPI
# client?
if (
getattr(destrepo, "nullableedenapi", None)
and destrepo.name
and (
(
ui.configbool("clone", "force-edenapi-clonedata")
or destrepo.ui.configbool("clone", "force-edenapi-clonedata")
)
or (
(
ui.configbool("clone", "prefer-edenapi-clonedata")
or destrepo.ui.configbool(
"clone", "prefer-edenapi-clonedata"
)
)
and "segmented-changelog" in destrepo.edenapi.capabilities()
)
)
):
clonecodepath = "segments"
ui.status(_("fetching lazy changelog\n"))
data = destrepo.edenapi.clonedata()
clonemod.segmentsclone(srcpeer.url(), data, destrepo)
# Can we use the new code path (stream clone + shallow + no
# update + selective pull)?
elif (
destrepo
and not pull
and not update
and not rev
and shallow
and stream is not False
and ui.configbool("remotenames", "selectivepull")
):
if ui.configbool("unsafe", "emergency-clone"):
clonecodepath = "emergency"
clonemod.emergencyclone(srcpeer.url(), destrepo)
else:
clonecodepath = "revlog"
clonemod.revlogclone(srcpeer.url(), destrepo)
elif destrepo:
reasons = []
if pull:
reasons.append("pull")
if update:
reasons.append("update")
if rev:
reasons.append("rev")
if not shallow:
reasons.append("not-shallow")
if stream is False:
reasons.append("not-stream")
if not ui.configbool("remotenames", "selectivepull"):
reasons.append("not-selectivepull")
ui.log(
"features",
fullargs=repr(pycompat.sysargv),
feature="legacy-clone",
traceback=util.smarttraceback(),
reason=" ".join(reasons),
)
with destrepo.wlock(), destrepo.lock(), destrepo.transaction("clone"):
if not stream:
if pull:
stream = False
else:
stream = None
overrides = {
# internal config: ui.quietbookmarkmove
("ui", "quietbookmarkmove"): True,
                    # the normal pull processes each commit and so is more
                    # expensive than streaming bytes from disk to the wire.
                    # disabling selectivepull allows running a streamclone
("remotenames", "selectivepull"): False,
}
opargs = {}
if shallow:
opargs["extras"] = {"shallow": True}
with destrepo.ui.configoverride(overrides, "clone"):
exchange.pull(
destrepo,
srcpeer,
revs,
streamclonerequested=stream,
opargs=opargs,
)
elif srcrepo:
exchange.push(
srcrepo, destpeer, revs=revs, bookmarks=srcrepo._bookmarks.keys()
)
else:
raise error.Abort(_("clone from remote to remote not supported"))
cleandir = None
if destrepo:
with destrepo.wlock(), destrepo.lock(), destrepo.transaction("clone"):
if update:
if update is not True:
checkout = srcpeer.lookup(update)
uprev = None
status = None
if checkout is not None:
try:
uprev = destrepo.lookup(checkout)
except error.RepoLookupError:
if update is not True:
try:
uprev = destrepo.lookup(update)
except error.RepoLookupError:
pass
if uprev is None:
try:
uprev = destrepo._bookmarks["@"]
update = "@"
bn = destrepo[uprev].branch()
if bn == "default":
status = _("updating to bookmark @\n")
else:
status = _("updating to bookmark @ on branch %s\n") % bn
except KeyError:
try:
uprev = destrepo.branchtip("default")
except error.RepoLookupError:
uprev = destrepo.lookup("tip")
if not status:
bn = destrepo[uprev].branch()
status = _("updating to branch %s\n") % bn
destrepo.ui.status(status)
_update(destrepo, uprev)
if update in destrepo._bookmarks:
bookmarks.activate(destrepo, update)
clonepreclose(
ui,
peeropts,
source,
dest,
pull,
rev,
update,
stream,
srcpeer,
destpeer,
clonecodepath=clonecodepath,
)
finally:
release(srclock, destlockw, destlock)
if srcpeer is not None:
srcpeer.close()
if destpeer is not None:
destpeer.close()
if cleandir is not None:
shutil.rmtree(cleandir, True)
return srcpeer, destpeer
def _writehgrc(repo, abspath, configfiles):
with repo.wlock(), repo.lock():
template = uimod.samplehgrcs["cloned"]
with repo.localvfs("hgrc", "wb") as fp:
u = util.url(abspath)
u.passwd = None
defaulturl = str(u)
fp.write(pycompat.encodeutf8(util.tonativeeol(template % defaulturl)))
if configfiles:
fp.write(pycompat.encodeutf8(util.tonativeeol("\n")))
for file in configfiles:
fp.write(
pycompat.encodeutf8(util.tonativeeol("%%include %s\n" % file))
)
def clonepreclose(
ui,
peeropts,
source,
dest=None,
pull=False,
rev=None,
update=True,
stream=False,
srcpeer=None,
destpeer=None,
clonecodepath=None,
):
"""Wrapped by extensions like remotenames before closing the peers
clonecodepath is one of:
- "copy": The clone was done by copying local files.
- "legacy-pull": The clone was done by the (legacy) pull code path.
- "revlog": The clone was done by the clone.streamclone code path,
which is less racy and writes remote bookmarks.
- "segments": The clone was done by lazy changelog path.
- "emergency": The clone was done by the emergency code path.
"""
return srcpeer, destpeer
def _showstats(repo, stats, quietempty=False):
if edenfs.requirement in repo.requirements:
return _eden_showstats(repo, stats, quietempty)
if quietempty and not any(stats):
return
repo.ui.status(
_(
"%d files updated, %d files merged, "
"%d files removed, %d files unresolved\n"
)
% stats
)
def _eden_showstats(repo, stats, quietempty=False):
# We hide the updated and removed counts, because they are not accurate
# with eden. One of the primary goals of eden is that the entire working
# directory does not need to be accessed or traversed on update operations.
(updated, merged, removed, unresolved) = stats
if merged or unresolved:
repo.ui.status(
_("%d files merged, %d files unresolved\n") % (merged, unresolved)
)
elif not quietempty:
repo.ui.status(_("update complete\n"))
def updaterepo(repo, node, overwrite, updatecheck=None):
"""Update the working directory to node.
    When overwrite is set, changes are clobbered; otherwise they are merged.
    Returns stats (see pydoc mercurial.merge.applyupdates)"""
return mergemod.update(
repo,
node,
False,
overwrite,
labels=["working copy", "destination"],
updatecheck=updatecheck,
)
def update(repo, node, quietempty=False, updatecheck=None):
"""update the working directory to node
    Returns True if any files were unresolved.
"""
stats = updaterepo(repo, node, False, updatecheck=updatecheck)
_showstats(repo, stats, quietempty)
if stats[3]:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
return stats[3] > 0
# naming conflict in clone()
_update = update
def clean(repo, node, show_stats=True, quietempty=False):
"""forcibly switch the working directory to node, clobbering changes
    Returns True if any files were unresolved.
"""
stats = updaterepo(repo, node, True)
repo.localvfs.unlinkpath("graftstate", ignoremissing=True)
if show_stats:
_showstats(repo, stats, quietempty)
return stats[3] > 0
# naming conflict in updatetotally()
_clean = clean
def updatetotally(ui, repo, checkout, brev, clean=False, updatecheck=None):
"""Update the working directory with extra care for non-file components
This takes care of non-file components below:
:bookmark: might be advanced or (in)activated
This takes arguments below:
:checkout: to which revision the working directory is updated
:brev: a name, which might be a bookmark to be activated after updating
:clean: whether changes in the working directory can be discarded
:updatecheck: how to deal with a dirty working directory
Valid values for updatecheck are (None => linear):
* abort: abort if the working directory is dirty
* none: don't check (merge working directory changes into destination)
* linear: check that update is linear before merging working directory
changes into destination
* noconflict: check that the update does not result in file merges
This returns whether conflict is detected at updating or not.
"""
if updatecheck is None:
updatecheck = ui.config("commands", "update.check")
if updatecheck not in ("abort", "none", "linear", "noconflict"):
# If not configured, or invalid value configured
updatecheck = "linear"
with repo.wlock():
movemarkfrom = None
warndest = False
if checkout is None:
updata = destutil.destupdate(repo, clean=clean)
checkout, movemarkfrom, brev = updata
warndest = True
if clean:
hasunresolved = _clean(repo, checkout)
else:
if updatecheck == "abort":
cmdutil.bailifchanged(repo, merge=False)
updatecheck = "none"
hasunresolved = _update(repo, checkout, updatecheck=updatecheck)
if not hasunresolved and movemarkfrom:
if movemarkfrom == repo["."].node():
pass # no-op update
elif bookmarks.update(repo, [movemarkfrom], repo["."].node()):
b = ui.label(repo._activebookmark, "bookmarks.active")
ui.status(_("updating bookmark %s\n") % b)
else:
# this can happen with a non-linear update
b = ui.label(repo._activebookmark, "bookmarks")
ui.status(_("(leaving bookmark %s)\n") % b)
bookmarks.deactivate(repo)
elif brev in repo._bookmarks:
if brev != repo._activebookmark:
b = ui.label(brev, "bookmarks.active")
ui.status(_("(activating bookmark %s)\n") % b)
bookmarks.activate(repo, brev)
else:
if repo._activebookmark:
b = ui.label(repo._activebookmark, "bookmarks")
ui.status(_("(leaving bookmark %s)\n") % b)
bookmarks.deactivate(repo)
if warndest:
destutil.statusotherdests(ui, repo)
return hasunresolved
def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None):
"""Branch merge with node, resolving changes. Return true if any
unresolved conflicts."""
stats = mergemod.update(
repo, node, True, force, mergeforce=mergeforce, labels=labels
)
_showstats(repo, stats)
if stats[3]:
repo.ui.status(
_(
"use 'hg resolve' to retry unresolved file merges "
"or 'hg update -C .' to abandon\n"
)
)
elif remind:
repo.ui.status(_("(branch merge, don't forget to commit)\n"))
return stats[3] > 0
def remoteui(src, opts):
"build a remote ui from ui or repo and opts"
if util.safehasattr(src, "baseui"): # looks like a repository
dst = src.baseui.copy() # drop repo-specific config
src = src.ui # copy target options from repo
else: # assume it's a global ui object
dst = src.copy() # keep all global options
# copy ssh-specific options
for o in "ssh", "remotecmd":
v = opts.get(o) or src.config("ui", o)
if v:
dst.setconfig("ui", o, v, "copied")
# copy bundle-specific options
r = src.config("bundle", "mainreporoot")
if r:
dst.setconfig("bundle", "mainreporoot", r, "copied")
# copy selected local settings to the remote ui
for sect in (
"auth",
"auth_proxy",
"cats",
"hostfingerprints",
"hostsecurity",
"http_proxy",
"help",
"edenapi",
"infinitepush",
"lfs",
"mononokepeer",
):
for key, val in src.configitems(sect):
dst.setconfig(sect, key, val, "copied")
v = src.config("web", "cacerts")
if v:
dst.setconfig("web", "cacerts", util.expandpath(v), "copied")
return dst
# Files of interest
# Used to check if the repository has changed by looking at mtime and size of
# these files.
foi = [
("spath", "00changelog.i"),
("spath", "phaseroots"), # ! phase can change content at the same size
("path", "bookmarks"), # ! bookmark can change content at the same size
]
class cachedlocalrepo(object):
"""Holds a localrepository that can be cached and reused."""
def __init__(self, repo):
"""Create a new cached repo from an existing repo.
We assume the passed in repo was recently created. If the
repo has changed between when it was created and when it was
turned into a cache, it may not refresh properly.
"""
assert isinstance(repo, localrepo.localrepository)
self._repo = repo
self._state, self.mtime = self._repostate()
def fetch(self):
"""Refresh (if necessary) and return a repository.
If the cached instance is out of date, it will be recreated
automatically and returned.
Returns a tuple of the repo and a boolean indicating whether a new
repo instance was created.
"""
# We compare the mtimes and sizes of some well-known files to
# determine if the repo changed. This is not precise, as mtimes
# are susceptible to clock skew and imprecise filesystems and
# file content can change while maintaining the same size.
state, mtime = self._repostate()
if state == self._state:
return self._repo, False
repo = repository(self._repo.baseui, self._repo.url())
self._repo = repo
self._state = state
self.mtime = mtime
return self._repo, True
def _repostate(self):
state = []
maxmtime = -1
for attr, fname in foi:
prefix = getattr(self._repo, attr)
p = os.path.join(prefix, fname)
try:
st = util.stat(p)
except OSError:
st = util.stat(prefix)
state.append((st.st_mtime, st.st_size))
maxmtime = max(maxmtime, st.st_mtime)
return tuple(state), maxmtime
def copy(self):
"""Obtain a copy of this class instance.
A new localrepository instance is obtained. The new instance should be
completely independent of the original.
"""
repo = repository(self._repo.baseui, self._repo.origroot)
c = cachedlocalrepo(repo)
c._state = self._state
c.mtime = self.mtime
return c
| facebookexperimental/eden | eden/scm/edenscm/mercurial/hg.py | Python | gpl-2.0 | 36,891 |
# -----------------------------------------------------------------------------
# Copyright (c) 2005-2020, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
# -----------------------------------------------------------------------------
# This hook was tested with pyexcel-xlsxw 0.4.2:
# https://github.com/pyexcel/pyexcel-xlsxw
hiddenimports = ['pyexcel_xlsxw']
| etherkit/OpenBeacon2 | client/linux-arm/venv/lib/python3.6/site-packages/PyInstaller/hooks/hook-pyexcel-xlsxw.py | Python | gpl-3.0 | 638 |
# This code re-runs the new_index script every 24 hours
# Importing new_index executes its top-level code once; importlib is needed
# to re-run it later
import importlib
import new_index, time
# While loop to re-run the new_index code every 24 hours
while True:
    # Just to keep the user aware
    print("I will check for a new file in 24 hours")
    # 24 hours = 86400 seconds
    time.sleep(86400)
    # Re-import new_index so its top-level code runs again (the original
    # bare `new_index` expression here was a no-op after the first import)
    importlib.reload(new_index)
| Coding-Credibility/EDGAR | daily.py | Python | apache-2.0 | 368 |
#!/usr/bin/python
import os
import sys
from stat import *
import zmq
import netifaces
import threading
import signal
import time
import argparse
import ConfigParser
from machinekit import service
from machinekit import config
from message_pb2 import Container
from config_pb2 import *
from types_pb2 import *
class ConfigServer:
def __init__(self, context, appDirs=[], topdir=".",
ip="", svcUuid=None, debug=False, name=None, ipInName=True):
self.appDirs = appDirs
self.ip = ip
self.name = name
self.debug = debug
self.shutdown = threading.Event()
self.running = False
self.cfg = ConfigParser.ConfigParser()
for rootdir in self.appDirs:
for root, subFolders, files in os.walk(rootdir):
if 'description.ini' in files:
inifile = os.path.join(root, 'description.ini')
cfg = ConfigParser.ConfigParser()
cfg.read(inifile)
appName = cfg.get('Default', 'name')
description = cfg.get('Default', 'description')
appType = cfg.get('Default', 'type')
self.cfg.add_section(appName)
self.cfg.set(appName, 'description', description)
self.cfg.set(appName, 'type', appType)
self.cfg.set(appName, 'files', root)
if self.debug:
print(("name: " + cfg.get('Default', 'name')))
print(("description: " + cfg.get('Default', 'description')))
print(("type: " + cfg.get('Default', 'type')))
print(("files: " + root))
self.rx = Container()
self.tx = Container()
self.topdir = topdir
self.context = context
self.baseUri = "tcp://" + self.ip
self.socket = context.socket(zmq.ROUTER)
self.port = self.socket.bind_to_random_port(self.baseUri)
self.dsname = self.socket.get_string(zmq.LAST_ENDPOINT, encoding='utf-8')
if self.name is None:
self.name = "Machinekit"
if ipInName:
self.name = self.name + " on " + self.ip
self.service = service.Service(type='config',
svcUuid=svcUuid,
dsn=self.dsname,
port=self.port,
ip=self.ip,
name=self.name,
debug=self.debug)
self.publish()
threading.Thread(target=self.process_sockets).start()
self.running = True
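    # For reference, a minimal description.ini accepted by the scan in
    # __init__ above (the section and key names come from the cfg.get calls;
    # the values shown are illustrative assumptions):
    #
    #   [Default]
    #   name = MyUI
    #   description = An example user interface
    #   type = QT5_QML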
def process_sockets(self):
poll = zmq.Poller()
poll.register(self.socket, zmq.POLLIN)
while not self.shutdown.is_set():
s = dict(poll.poll(1000))
if self.socket in s:
self.process(self.socket)
self.unpublish()
self.running = False
def publish(self):
try:
self.service.publish()
except Exception as e:
            print(('cannot register DNS service: ' + str(e)))
sys.exit(1)
def unpublish(self):
self.service.unpublish()
def stop(self):
self.shutdown.set()
def typeToPb(self, type):
if type == 'QT5_QML':
return QT5_QML
elif type == 'GLADEVCP':
return GLADEVCP
else:
return JAVASCRIPT
def send_msg(self, dest, type):
self.tx.type = type
txBuffer = self.tx.SerializeToString()
if self.debug:
print(("send_msg " + str(self.tx)))
self.tx.Clear()
self.socket.send_multipart([dest, txBuffer])
def list_apps(self, origin):
for name in self.cfg.sections():
app = self.tx.app.add()
app.name = name
app.description = self.cfg.get(name, 'description')
app.type = self.typeToPb(self.cfg.get(name, 'type'))
self.send_msg(origin, MT_DESCRIBE_APPLICATION)
def add_files(self, basePath, path, app):
if self.debug:
print(("add files " + path))
for f in os.listdir(path):
pathname = os.path.join(path, f)
mode = os.stat(pathname).st_mode
if S_ISREG(mode):
filename = os.path.join(os.path.relpath(path, basePath), f)
if self.debug:
print(("add " + pathname))
print(("name " + filename))
fileBuffer = open(pathname, 'rb').read()
appFile = app.file.add()
appFile.name = filename
appFile.encoding = CLEARTEXT
appFile.blob = fileBuffer
elif S_ISDIR(mode):
self.add_files(basePath, pathname, app)
def retrieve_app(self, origin, name):
if self.debug:
print(("retrieve app " + name))
app = self.tx.app.add()
app.name = name
app.description = self.cfg.get(name, 'description')
app.type = self.typeToPb(self.cfg.get(name, 'type'))
self.add_files(self.cfg.get(name, 'files'),
self.cfg.get(name, 'files'), app)
self.send_msg(origin, MT_APPLICATION_DETAIL)
def process(self, s):
if self.debug:
print("process called")
try:
(origin, msg) = s.recv_multipart()
except Exception as e:
print(("Exception " + str(e)))
return
self.rx.ParseFromString(msg)
if self.rx.type == MT_LIST_APPLICATIONS:
self.list_apps(origin)
return
if self.rx.type == MT_RETRIEVE_APPLICATION:
a = self.rx.app[0]
self.retrieve_app(origin, a.name)
return
        self.tx.note.append("unsupported request type %d" % (self.rx.type))
self.send_msg(origin, MT_ERROR)
def choose_ip(pref):
'''
given an interface preference list, return a tuple (interface, ip)
or None if no match found
If an interface has several ip addresses, the first one is picked.
pref is a list of interface names or prefixes:
pref = ['eth0','usb3']
or
pref = ['wlan','eth', 'usb']
'''
# retrieve list of network interfaces
interfaces = netifaces.interfaces()
    # find a match in preference order
for p in pref:
for i in interfaces:
if i.startswith(p):
ifcfg = netifaces.ifaddresses(i)
# we want the first ip address
try:
ip = ifcfg[netifaces.AF_INET][0]['addr']
except KeyError:
continue
return (i, ip)
return None
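# Usage sketch for choose_ip (the interface names and address below are
# illustrative, not guaranteed to exist on any host): on a machine with
# interfaces ['lo', 'eth0', 'wlan0'], choose_ip(['eth', 'usb']) would return
# ('eth0', '192.168.0.10'), while choose_ip(['usb']) would return None since
# no interface name starts with a preferred prefix.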
shutdown = False
def _exitHandler(signum, frame):
global shutdown
shutdown = True
# register exit signal handlers
def register_exit_handler():
signal.signal(signal.SIGINT, _exitHandler)
signal.signal(signal.SIGTERM, _exitHandler)
def check_exit():
global shutdown
return shutdown
def main():
parser = argparse.ArgumentParser(description='Configserver is the entry point for Machinetalk based user interfaces')
parser.add_argument('-n', '--name', help='Name of the machine', default="Machinekit")
parser.add_argument('-s', '--suppress_ip', help='Do not show ip of machine in service name', action='store_false')
parser.add_argument('-d', '--debug', help='Enable debug mode', action='store_true')
parser.add_argument('dirs', nargs='*', help="List of directories to scan for user interface configurations")
args = parser.parse_args()
debug = args.debug
mkconfig = config.Config()
mkini = os.getenv("MACHINEKIT_INI")
if mkini is None:
mkini = mkconfig.MACHINEKIT_INI
if not os.path.isfile(mkini):
sys.stderr.write("MACHINEKIT_INI " + mkini + " does not exist\n")
sys.exit(1)
mki = ConfigParser.ConfigParser()
mki.read(mkini)
uuid = mki.get("MACHINEKIT", "MKUUID")
remote = mki.getint("MACHINEKIT", "REMOTE")
prefs = mki.get("MACHINEKIT", "INTERFACES").split()
if remote == 0:
print("Remote communication is deactivated, configserver will use the loopback interfaces")
print(("set REMOTE in " + mkini + " to 1 to enable remote communication"))
iface = ['lo', '127.0.0.1']
else:
iface = choose_ip(prefs)
if not iface:
sys.stderr.write("failed to determine preferred interface (preference = %s)\n" % prefs)
sys.exit(1)
if debug:
print(("announcing configserver on " + str(iface)))
context = zmq.Context()
context.linger = 0
register_exit_handler()
configService = None
try:
configService = ConfigServer(context,
svcUuid=uuid,
topdir=".",
ip=iface[1],
appDirs=args.dirs,
name=args.name,
ipInName=bool(args.suppress_ip),
debug=debug)
while configService.running and not check_exit():
time.sleep(1)
except Exception as e:
print("exception")
print(e)
except:
print("other exception")
if debug:
print("stopping threads")
if configService is not None:
configService.stop()
# wait for all threads to terminate
while threading.active_count() > 1:
time.sleep(0.1)
if debug:
print("threads stopped")
sys.exit(0)
if __name__ == "__main__":
main()
| EqAfrica/machinekit | src/machinetalk/config-service/configserver.py | Python | lgpl-2.1 | 9,656 |
#!/usr/bin/env python
#
#CustomScript extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import os
import json
import unittest
import env
import aem
from Utils.WAAgentUtil import waagent
TestPublicConfig = """\
{
"cfg": [{
"key": "vmsize",
"value": "Small (A1)"
},{
"key": "vm.roleinstance",
"value": "osupdate"
},{
"key": "vm.role",
"value": "IaaS"
},{
"key": "vm.deploymentid",
"value": "cd98461b43364478a908d03d0c3135a7"
},{
"key": "vm.memory.isovercommitted",
"value": 0
},{
"key": "vm.cpu.isovercommitted",
"value": 0
},{
"key": "script.version",
"value": "1.2.0.0"
},{
"key": "verbose",
"value": "0"
},{
"key": "osdisk.connminute",
"value": "asdf.minute"
},{
"key": "osdisk.connhour",
"value": "asdf.hour"
},{
"key": "osdisk.name",
"value": "osupdate-osupdate-2015-02-12.vhd"
},{
"key": "asdf.hour.uri",
"value": "https://asdf.table.core.windows.net/$metricshourprimarytransactionsblob"
},{
"key": "asdf.minute.uri",
"value": "https://asdf.table.core.windows.net/$metricsminuteprimarytransactionsblob"
},{
"key": "asdf.hour.name",
"value": "asdf"
},{
"key": "asdf.minute.name",
"value": "asdf"
},{
"key": "wad.name",
"value": "asdf"
},{
"key": "wad.isenabled",
"value": "1"
},{
"key": "wad.uri",
"value": "https://asdf.table.core.windows.net/wadperformancecounterstable"
}]
}
"""
TestPrivateConfig = """\
{
"cfg" : [{
"key" : "asdf.minute.key",
"value" : "qwer"
},{
"key" : "wad.key",
"value" : "qwer"
}]
}
"""
class TestAEM(unittest.TestCase):
def setUp(self):
waagent.LoggerInit("/dev/null", "/dev/stdout")
def test_config(self):
publicConfig = json.loads(TestPublicConfig)
privateConfig = json.loads(TestPrivateConfig)
config = aem.EnhancedMonitorConfig(publicConfig, privateConfig)
self.assertNotEquals(None, config)
self.assertEquals(".table.core.windows.net",
config.getStorageHostBase('asdf'))
self.assertEquals(".table.core.windows.net",
config.getLADHostBase())
return config
def test_static_datasource(self):
config = self.test_config()
dataSource = aem.StaticDataSource(config)
counters = dataSource.collect()
self.assertNotEquals(None, counters)
self.assertNotEquals(0, len(counters))
name = "Cloud Provider"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertEquals("Microsoft Azure", counter.value)
name = "Virtualization Solution Version"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertNotEquals(None, counter.value)
name = "Virtualization Solution"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertNotEquals(None, counter.value)
name = "Instance Type"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertEquals("Small (A1)", counter.value)
name = "Data Sources"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertEquals("wad", counter.value)
name = "Data Provider Version"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertEquals("2.0.0", counter.value)
name = "Memory Over-Provisioning"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertEquals("no", counter.value)
name = "CPU Over-Provisioning"
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertEquals("no", counter.value)
def test_cpuinfo(self):
cpuinfo = aem.CPUInfo.getCPUInfo()
self.assertNotEquals(None, cpuinfo)
self.assertNotEquals(0, cpuinfo.getNumOfCoresPerCPU())
self.assertNotEquals(0, cpuinfo.getNumOfCores())
self.assertNotEquals(None, cpuinfo.getProcessorType())
self.assertEquals(float, type(cpuinfo.getFrequency()))
self.assertEquals(bool, type(cpuinfo.isHyperThreadingOn()))
percent = cpuinfo.getCPUPercent()
self.assertEquals(float, type(percent))
self.assertTrue(percent >= 0 and percent <= 100)
def test_meminfo(self):
meminfo = aem.MemoryInfo()
self.assertNotEquals(None, meminfo.getMemSize())
self.assertEquals(long, type(meminfo.getMemSize()))
percent = meminfo.getMemPercent()
self.assertEquals(float, type(percent))
self.assertTrue(percent >= 0 and percent <= 100)
def test_networkinfo(self):
netinfo = aem.NetworkInfo()
adapterIds = netinfo.getAdapterIds()
self.assertNotEquals(None, adapterIds)
self.assertNotEquals(0, len(adapterIds))
adapterId = adapterIds[0]
self.assertNotEquals(None, aem.getMacAddress(adapterId))
self.assertNotEquals(None, netinfo.getNetworkReadBytes())
self.assertNotEquals(None, netinfo.getNetworkWriteBytes())
self.assertNotEquals(None, netinfo.getNetworkPacketRetransmitted())
def test_hwchangeinfo(self):
netinfo = aem.NetworkInfo()
testHwInfoFile = "/tmp/HwInfo"
aem.HwInfoFile = testHwInfoFile
if os.path.isfile(testHwInfoFile):
os.remove(testHwInfoFile)
hwChangeInfo = aem.HardwareChangeInfo(netinfo)
self.assertNotEquals(None, hwChangeInfo.getLastHardwareChange())
        self.assertTrue(os.path.isfile(aem.HwInfoFile))
#No hardware change
lastChange = hwChangeInfo.getLastHardwareChange()
hwChangeInfo = aem.HardwareChangeInfo(netinfo)
self.assertEquals(lastChange, hwChangeInfo.getLastHardwareChange())
#Create mock hardware
waagent.SetFileContents(testHwInfoFile, ("0\nma-ca-sa-ds-02"))
hwChangeInfo = aem.HardwareChangeInfo(netinfo)
self.assertNotEquals(None, hwChangeInfo.getLastHardwareChange())
def test_linux_metric(self):
config = self.test_config()
metric = aem.LinuxMetric(config)
self.validate_cnm_metric(metric)
#Metric for CPU, network and memory
def validate_cnm_metric(self, metric):
self.assertNotEquals(None, metric.getCurrHwFrequency())
self.assertNotEquals(None, metric.getMaxHwFrequency())
self.assertNotEquals(None, metric.getCurrVMProcessingPower())
self.assertNotEquals(None, metric.getGuaranteedMemAssigned())
self.assertNotEquals(None, metric.getMaxVMProcessingPower())
self.assertNotEquals(None, metric.getNumOfCoresPerCPU())
self.assertNotEquals(None, metric.getNumOfThreadsPerCore())
self.assertNotEquals(None, metric.getPhysProcessingPowerPerVCPU())
self.assertNotEquals(None, metric.getProcessorType())
self.assertNotEquals(None, metric.getReferenceComputeUnit())
self.assertNotEquals(None, metric.getVCPUMapping())
self.assertNotEquals(None, metric.getVMProcessingPowerConsumption())
self.assertNotEquals(None, metric.getCurrMemAssigned())
self.assertNotEquals(None, metric.getGuaranteedMemAssigned())
self.assertNotEquals(None, metric.getMaxMemAssigned())
self.assertNotEquals(None, metric.getVMMemConsumption())
adapterIds = metric.getNetworkAdapterIds()
self.assertNotEquals(None, adapterIds)
self.assertNotEquals(0, len(adapterIds))
adapterId = adapterIds[0]
self.assertNotEquals(None, metric.getNetworkAdapterMapping(adapterId))
self.assertNotEquals(None, metric.getMaxNetworkBandwidth(adapterId))
self.assertNotEquals(None, metric.getMinNetworkBandwidth(adapterId))
self.assertNotEquals(None, metric.getNetworkReadBytes())
self.assertNotEquals(None, metric.getNetworkWriteBytes())
self.assertNotEquals(None, metric.getNetworkPacketRetransmitted())
self.assertNotEquals(None, metric.getLastHardwareChange())
def test_vm_datasource(self):
config = self.test_config()
config.configData["wad.isenabled"] = "0"
dataSource = aem.VMDataSource(config)
counters = dataSource.collect()
self.assertNotEquals(None, counters)
self.assertNotEquals(0, len(counters))
counterNames = [
"Current Hw Frequency",
"Current VM Processing Power",
"Guaranteed VM Processing Power",
"Max Hw Frequency",
"Max. VM Processing Power",
"Number of Cores per CPU",
"Number of Threads per Core",
"Phys. Processing Power per vCPU",
"Processor Type",
"Reference Compute Unit",
"vCPU Mapping",
"VM Processing Power Consumption",
"Current Memory assigned",
"Guaranteed Memory assigned",
"Max Memory assigned",
"VM Memory Consumption",
"Adapter Id",
"Mapping",
"Maximum Network Bandwidth",
"Minimum Network Bandwidth",
"Network Read Bytes",
"Network Write Bytes",
"Packets Retransmitted"
]
#print "\n".join(map(lambda c: str(c), counters))
for name in counterNames:
#print name
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertNotEquals(None, counter.value)
def test_storagemetric(self):
metrics = mock_getStorageMetrics()
self.assertNotEquals(None, metrics)
stat = aem.AzureStorageStat(metrics)
self.assertNotEquals(None, stat.getReadBytes())
self.assertNotEquals(None, stat.getReadOps())
self.assertNotEquals(None, stat.getReadOpE2ELatency())
self.assertNotEquals(None, stat.getReadOpServerLatency())
self.assertNotEquals(None, stat.getReadOpThroughput())
self.assertNotEquals(None, stat.getWriteBytes())
self.assertNotEquals(None, stat.getWriteOps())
self.assertNotEquals(None, stat.getWriteOpE2ELatency())
self.assertNotEquals(None, stat.getWriteOpServerLatency())
self.assertNotEquals(None, stat.getWriteOpThroughput())
def test_disk_info(self):
config = self.test_config()
mapping = aem.DiskInfo(config).getDiskMapping()
self.assertNotEquals(None, mapping)
def test_get_storage_key_range(self):
startKey, endKey = aem.getStorageTableKeyRange()
self.assertNotEquals(None, startKey)
self.assertEquals(13, len(startKey))
self.assertNotEquals(None, endKey)
self.assertEquals(13, len(endKey))
def test_storage_datasource(self):
aem.getStorageMetrics = mock_getStorageMetrics
config = self.test_config()
dataSource = aem.StorageDataSource(config)
counters = dataSource.collect()
self.assertNotEquals(None, counters)
self.assertNotEquals(0, len(counters))
counterNames = [
"Phys. Disc to Storage Mapping",
"Storage ID",
"Storage Read Bytes",
"Storage Read Op Latency E2E msec",
"Storage Read Op Latency Server msec",
"Storage Read Ops",
"Storage Read Throughput E2E MB/sec",
"Storage Write Bytes",
"Storage Write Op Latency E2E msec",
"Storage Write Op Latency Server msec",
"Storage Write Ops",
"Storage Write Throughput E2E MB/sec"
]
#print "\n".join(map(lambda c: str(c), counters))
for name in counterNames:
#print name
counter = next((c for c in counters if c.name == name))
self.assertNotEquals(None, counter)
self.assertNotEquals(None, counter.value)
def test_writer(self):
testEventFile = "/tmp/Event"
if os.path.isfile(testEventFile):
os.remove(testEventFile)
writer = aem.PerfCounterWriter()
counters = [aem.PerfCounter(counterType = 0,
category = "test",
name = "test",
value = "test",
unit = "test")]
writer.write(counters, eventFile = testEventFile)
with open(testEventFile) as F:
content = F.read()
self.assertEquals(str(counters[0]), content)
testEventFile = "/dev/console"
print("==============================")
print("The warning below is expected.")
self.assertRaises(IOError, writer.write, counters, 2, testEventFile)
print("==============================")
def test_easyHash(self):
hashVal = aem.easyHash('a')
self.assertEquals(97, hashVal)
hashVal = aem.easyHash('ab')
self.assertEquals(87, hashVal)
hashVal = aem.easyHash(("ciextension-SUSELinuxEnterpriseServer11SP3"
"___role1___"
"ciextension-SUSELinuxEnterpriseServer11SP3"))
self.assertEquals(5, hashVal)
def test_get_ad_key_range(self):
startKey, endKey = aem.getAzureDiagnosticKeyRange()
print(startKey)
print(endKey)
def test_get_mds_timestamp(self):
date = datetime.datetime(2015, 1, 26, 3, 54)
epoch = datetime.datetime.utcfromtimestamp(0)
unixTimestamp = (int((date - epoch).total_seconds()))
mdsTimestamp = aem.getMDSTimestamp(unixTimestamp)
self.assertEquals(635578412400000000, mdsTimestamp)
def test_get_storage_timestamp(self):
date = datetime.datetime(2015, 1, 26, 3, 54)
epoch = datetime.datetime.utcfromtimestamp(0)
unixTimestamp = (int((date - epoch).total_seconds()))
storageTimestamp = aem.getStorageTimestamp(unixTimestamp)
self.assertEquals("20150126T0354", storageTimestamp)
def mock_getStorageMetrics(*args, **kwargs):
with open(os.path.join(env.test_dir, "storage_metrics")) as F:
test_data = F.read()
jsonObjs = json.loads(test_data)
class ObjectView(object):
def __init__(self, data):
self.__dict__ = data
metrics = map(lambda x : ObjectView(x), jsonObjs)
return metrics
if __name__ == '__main__':
unittest.main()
| Azure/azure-linux-extensions | AzureEnhancedMonitor/ext/test/test_aem.py | Python | apache-2.0 | 15,568 |
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext as _
from django.http import HttpResponseBadRequest
import oauth2 as oauth
from oauth_provider.utils import send_oauth_error
INVALID_CONSUMER_RESPONSE = HttpResponseBadRequest('Invalid Consumer.')
def invalid_params_response(scheme, domain):
    return send_oauth_error(scheme, domain,
        oauth.Error(_('Invalid request parameters.')))
def invalid_scope_response(scheme, domain):
    return send_oauth_error(scheme, domain,
        oauth.Error(_('You are not allowed to access this resource.')))
def could_not_verify_oauth_request_response(scheme, domain):
    return send_oauth_error(scheme, domain,
        oauth.Error(_('Could not verify OAuth request.')))
| ljwolford/ADL_LRS | oauth_provider/responses.py | Python | apache-2.0 | 698 |
# TODO: Needs a better name; too many modules are already called "concat"
from collections import defaultdict
import copy
import numpy as np
from pandas._libs import internals as libinternals, tslibs
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.cast import maybe_promote
from pandas.core.dtypes.common import (
_get_dtype, is_categorical_dtype, is_datetime64_dtype,
is_datetime64tz_dtype, is_extension_array_dtype, is_float_dtype,
is_numeric_dtype, is_sparse, is_timedelta64_dtype)
import pandas.core.dtypes.concat as _concat
from pandas.core.dtypes.missing import isna
import pandas.core.algorithms as algos
def get_mgr_concatenation_plan(mgr, indexers):
"""
Construct concatenation plan for given block manager and indexers.
Parameters
----------
mgr : BlockManager
indexers : dict of {axis: indexer}
Returns
-------
plan : list of (BlockPlacement, JoinUnit) tuples
"""
    # Calculate post-reindex shape, save for item axis which will be separate
# for each block anyway.
mgr_shape = list(mgr.shape)
for ax, indexer in indexers.items():
mgr_shape[ax] = len(indexer)
mgr_shape = tuple(mgr_shape)
if 0 in indexers:
ax0_indexer = indexers.pop(0)
blknos = algos.take_1d(mgr._blknos, ax0_indexer, fill_value=-1)
blklocs = algos.take_1d(mgr._blklocs, ax0_indexer, fill_value=-1)
else:
if mgr._is_single_block:
blk = mgr.blocks[0]
return [(blk.mgr_locs, JoinUnit(blk, mgr_shape, indexers))]
ax0_indexer = None
blknos = mgr._blknos
blklocs = mgr._blklocs
plan = []
for blkno, placements in libinternals.get_blkno_placements(blknos,
mgr.nblocks,
group=False):
assert placements.is_slice_like
join_unit_indexers = indexers.copy()
shape = list(mgr_shape)
shape[0] = len(placements)
shape = tuple(shape)
if blkno == -1:
unit = JoinUnit(None, shape)
else:
blk = mgr.blocks[blkno]
ax0_blk_indexer = blklocs[placements.indexer]
unit_no_ax0_reindexing = (len(placements) == len(blk.mgr_locs) and
# Fastpath detection of join unit not
# needing to reindex its block: no ax0
# reindexing took place and block
# placement was sequential before.
((ax0_indexer is None and
blk.mgr_locs.is_slice_like and
blk.mgr_locs.as_slice.step == 1) or
# Slow-ish detection: all indexer locs
# are sequential (and length match is
# checked above).
(np.diff(ax0_blk_indexer) == 1).all()))
# Omit indexer if no item reindexing is required.
if unit_no_ax0_reindexing:
join_unit_indexers.pop(0, None)
else:
join_unit_indexers[0] = ax0_blk_indexer
unit = JoinUnit(blk, shape, join_unit_indexers)
plan.append((placements, unit))
return plan
class JoinUnit:
def __init__(self, block, shape, indexers=None):
# Passing shape explicitly is required for cases when block is None.
if indexers is None:
indexers = {}
self.block = block
self.indexers = indexers
self.shape = shape
def __repr__(self):
return '{name}({block!r}, {indexers})'.format(
name=self.__class__.__name__, block=self.block,
indexers=self.indexers)
@cache_readonly
def needs_filling(self):
for indexer in self.indexers.values():
# FIXME: cache results of indexer == -1 checks.
if (indexer == -1).any():
return True
return False
@cache_readonly
def dtype(self):
if self.block is None:
raise AssertionError("Block is None, no dtype")
if not self.needs_filling:
return self.block.dtype
else:
return _get_dtype(maybe_promote(self.block.dtype,
self.block.fill_value)[0])
@cache_readonly
def is_na(self):
if self.block is None:
return True
if not self.block._can_hold_na:
return False
        # Usually it's enough to check only a small fraction of values to see
        # if a block is NOT null; chunks should help in such cases. The value
        # 1000 was chosen rather arbitrarily.
values = self.block.values
if self.block.is_categorical:
values_flat = values.categories
elif is_sparse(self.block.values.dtype):
return False
elif self.block.is_extension:
values_flat = values
else:
values_flat = values.ravel(order='K')
total_len = values_flat.shape[0]
chunk_len = max(total_len // 40, 1000)
for i in range(0, total_len, chunk_len):
if not isna(values_flat[i:i + chunk_len]).all():
return False
return True
def get_reindexed_values(self, empty_dtype, upcasted_na):
if upcasted_na is None:
# No upcasting is necessary
fill_value = self.block.fill_value
values = self.block.get_values()
else:
fill_value = upcasted_na
if self.is_na:
if getattr(self.block, 'is_object', False):
# we want to avoid filling with np.nan if we are
# using None; we already know that we are all
# nulls
values = self.block.values.ravel(order='K')
if len(values) and values[0] is None:
fill_value = None
if (getattr(self.block, 'is_datetimetz', False) or
is_datetime64tz_dtype(empty_dtype)):
if self.block is None:
array = empty_dtype.construct_array_type()
return array(np.full(self.shape[1], fill_value.value),
dtype=empty_dtype)
pass
elif getattr(self.block, 'is_categorical', False):
pass
elif getattr(self.block, 'is_extension', False):
pass
else:
missing_arr = np.empty(self.shape, dtype=empty_dtype)
missing_arr.fill(fill_value)
return missing_arr
if not self.indexers:
if not self.block._can_consolidate:
# preserve these for validation in _concat_compat
return self.block.values
if self.block.is_bool and not self.block.is_categorical:
# External code requested filling/upcasting, bool values must
# be upcasted to object to avoid being upcasted to numeric.
values = self.block.astype(np.object_).values
elif self.block.is_extension:
values = self.block.values
else:
# No dtype upcasting is done here, it will be performed during
# concatenation itself.
values = self.block.get_values()
if not self.indexers:
# If there's no indexing to be done, we want to signal outside
# code that this array must be copied explicitly. This is done
# by returning a view and checking `retval.base`.
values = values.view()
else:
for ax, indexer in self.indexers.items():
values = algos.take_nd(values, indexer, axis=ax,
fill_value=fill_value)
return values
def concatenate_join_units(join_units, concat_axis, copy):
"""
Concatenate values from several join units along selected axis.
"""
if concat_axis == 0 and len(join_units) > 1:
# Concatenating join units along ax0 is handled in _merge_blocks.
raise AssertionError("Concatenating join units along axis0")
empty_dtype, upcasted_na = get_empty_dtype_and_na(join_units)
to_concat = [ju.get_reindexed_values(empty_dtype=empty_dtype,
upcasted_na=upcasted_na)
for ju in join_units]
if len(to_concat) == 1:
# Only one block, nothing to concatenate.
concat_values = to_concat[0]
if copy:
if isinstance(concat_values, np.ndarray):
# non-reindexed (=not yet copied) arrays are made into a view
# in JoinUnit.get_reindexed_values
if concat_values.base is not None:
concat_values = concat_values.copy()
else:
concat_values = concat_values.copy()
else:
concat_values = _concat._concat_compat(to_concat, axis=concat_axis)
return concat_values
def get_empty_dtype_and_na(join_units):
"""
Return dtype and N/A values to use when concatenating specified units.
Returned N/A value may be None which means there was no casting involved.
Returns
-------
dtype
na
"""
if len(join_units) == 1:
blk = join_units[0].block
if blk is None:
return np.float64, np.nan
if is_uniform_reindex(join_units):
# XXX: integrate property
empty_dtype = join_units[0].block.dtype
upcasted_na = join_units[0].block.fill_value
return empty_dtype, upcasted_na
has_none_blocks = False
dtypes = [None] * len(join_units)
for i, unit in enumerate(join_units):
if unit.block is None:
has_none_blocks = True
else:
dtypes[i] = unit.dtype
upcast_classes = defaultdict(list)
null_upcast_classes = defaultdict(list)
for dtype, unit in zip(dtypes, join_units):
if dtype is None:
continue
if is_categorical_dtype(dtype):
upcast_cls = 'category'
elif is_datetime64tz_dtype(dtype):
upcast_cls = 'datetimetz'
elif issubclass(dtype.type, np.bool_):
upcast_cls = 'bool'
elif issubclass(dtype.type, np.object_):
upcast_cls = 'object'
elif is_datetime64_dtype(dtype):
upcast_cls = 'datetime'
elif is_timedelta64_dtype(dtype):
upcast_cls = 'timedelta'
elif is_sparse(dtype):
upcast_cls = dtype.subtype.name
elif is_extension_array_dtype(dtype):
upcast_cls = 'object'
elif is_float_dtype(dtype) or is_numeric_dtype(dtype):
upcast_cls = dtype.name
else:
upcast_cls = 'float'
# Null blocks should not influence upcast class selection, unless there
# are only null blocks, when same upcasting rules must be applied to
# null upcast classes.
if unit.is_na:
null_upcast_classes[upcast_cls].append(dtype)
else:
upcast_classes[upcast_cls].append(dtype)
if not upcast_classes:
upcast_classes = null_upcast_classes
# create the result
if 'object' in upcast_classes:
return np.dtype(np.object_), np.nan
elif 'bool' in upcast_classes:
if has_none_blocks:
return np.dtype(np.object_), np.nan
else:
return np.dtype(np.bool_), None
elif 'category' in upcast_classes:
return np.dtype(np.object_), np.nan
elif 'datetimetz' in upcast_classes:
# GH-25014. We use NaT instead of iNaT, since this eventually
# ends up in DatetimeArray.take, which does not allow iNaT.
dtype = upcast_classes['datetimetz']
return dtype[0], tslibs.NaT
elif 'datetime' in upcast_classes:
return np.dtype('M8[ns]'), tslibs.iNaT
elif 'timedelta' in upcast_classes:
return np.dtype('m8[ns]'), tslibs.iNaT
else: # pragma
try:
g = np.find_common_type(upcast_classes, [])
except TypeError:
# At least one is an ExtensionArray
return np.dtype(np.object_), np.nan
else:
if is_float_dtype(g):
return g, g.type(np.nan)
elif is_numeric_dtype(g):
if has_none_blocks:
return np.float64, np.nan
else:
return g, None
msg = "invalid dtype determination in get_concat_dtype"
raise AssertionError(msg)
def is_uniform_join_units(join_units):
"""
Check if the join units consist of blocks of uniform type that can
be concatenated using Block.concat_same_type instead of the generic
concatenate_join_units (which uses `_concat._concat_compat`).
"""
return (
# all blocks need to have the same type
all(type(ju.block) is type(join_units[0].block) for ju in join_units) and # noqa
# no blocks that would get missing values (can lead to type upcasts)
# unless we're an extension dtype.
all(not ju.is_na or ju.block.is_extension for ju in join_units) and
# no blocks with indexers (as then the dimensions do not fit)
all(not ju.indexers for ju in join_units) and
# disregard Panels
all(ju.block.ndim <= 2 for ju in join_units) and
# only use this path when there is something to concatenate
len(join_units) > 1)
def is_uniform_reindex(join_units):
return (
# TODO: should this be ju.block._can_hold_na?
all(ju.block and ju.block.is_extension for ju in join_units) and
len({ju.block.dtype.name for ju in join_units}) == 1
)
def trim_join_unit(join_unit, length):
"""
Reduce join_unit's shape along item axis to length.
Extra items that didn't fit are returned as a separate block.
"""
if 0 not in join_unit.indexers:
extra_indexers = join_unit.indexers
if join_unit.block is None:
extra_block = None
else:
extra_block = join_unit.block.getitem_block(slice(length, None))
join_unit.block = join_unit.block.getitem_block(slice(length))
else:
extra_block = join_unit.block
extra_indexers = copy.copy(join_unit.indexers)
extra_indexers[0] = extra_indexers[0][length:]
join_unit.indexers[0] = join_unit.indexers[0][:length]
extra_shape = (join_unit.shape[0] - length,) + join_unit.shape[1:]
join_unit.shape = (length,) + join_unit.shape[1:]
return JoinUnit(block=extra_block, indexers=extra_indexers,
shape=extra_shape)
def combine_concat_plans(plans, concat_axis):
"""
Combine multiple concatenation plans into one.
existing_plan is updated in-place.
"""
if len(plans) == 1:
for p in plans[0]:
yield p[0], [p[1]]
elif concat_axis == 0:
offset = 0
for plan in plans:
last_plc = None
for plc, unit in plan:
yield plc.add(offset), [unit]
last_plc = plc
if last_plc is not None:
offset += last_plc.as_slice.stop
else:
num_ended = [0]
def _next_or_none(seq):
retval = next(seq, None)
if retval is None:
num_ended[0] += 1
return retval
plans = list(map(iter, plans))
next_items = list(map(_next_or_none, plans))
while num_ended[0] != len(next_items):
if num_ended[0] > 0:
raise ValueError("Plan shapes are not aligned")
placements, units = zip(*next_items)
lengths = list(map(len, placements))
min_len, max_len = min(lengths), max(lengths)
if min_len == max_len:
yield placements[0], units
next_items[:] = map(_next_or_none, plans)
else:
yielded_placement = None
yielded_units = [None] * len(next_items)
for i, (plc, unit) in enumerate(next_items):
yielded_units[i] = unit
if len(plc) > min_len:
# trim_join_unit updates unit in place, so only
# placement needs to be sliced to skip min_len.
next_items[i] = (plc[min_len:],
trim_join_unit(unit, min_len))
else:
yielded_placement = plc
next_items[i] = _next_or_none(plans[i])
yield yielded_placement, yielded_units
| cbertinato/pandas | pandas/core/internals/concat.py | Python | bsd-3-clause | 17,025 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
#
# Tests of the grammar
#
# Code takes a tab-delimited text file of the form:
#
# Func Test Valid InType Expected
# pm -+ True string
# pm * False one
# num 1|+1 True list 1|1
#
# Headers are defined as follows:
# Func: function name to call in the grammar
# Test: item(s) to test
# Valid: if the input is expected to be valid (True or False)
# InType: 3 type:
# - one: input is a single value
# - string: input is a string; test each character in the string separately
# - list: input is a list delimited by a pipe character ('|')
# Expected: expected result (if stringifying input does not return the same answer, e,g. '+1' -> '1')
# - if expected is left blank, then it is assumed that stringifying the parsed input returns the same answer.
#
import os
import pkg_resources
import pprint
import unittest
import unicodecsv as csv
import hgvs.parser
class TestGrammarFull(unittest.TestCase):
def setUp(self):
self.p = hgvs.parser.Parser()
self.grammar = self.p._grammar
self._test_fn = os.path.join(os.path.dirname(__file__), 'data', 'grammar_test.tsv')
def test_parser_test_completeness(self):
rules_tested = set()
with open(self._test_fn, 'r') as f:
reader = csv.DictReader(f, delimiter=str('\t'))
for row in reader:
rules_tested.add(row['Func'])
        rules_all = set()
grammar_fn = pkg_resources.resource_filename(__name__, '../hgvs/_data/hgvs.pymeta')
for line in open(grammar_fn, 'r'):
if len(line) > 0 and line[0] != '#' and line[0].isalpha():
line = line.strip()
rules_all.add(line.split()[0])
rules_untested = rules_all - rules_tested
msg = "untested rules: {}".format(rules_untested)
self.assertTrue(len(rules_untested) == 0, msg)
def test_parser_grammar(self):
with open(self._test_fn, 'r') as f:
reader = csv.DictReader(f, delimiter=str('\t'))
fail_cases = []
for row in reader:
if row['Func'].startswith('#'):
continue
# setup input
inputs = self._split_inputs(row['Test'], row['InType'])
expected_results = self._split_inputs(row['Expected'], row['InType']) if row['Expected'] else inputs
expected_map = dict(zip(inputs, expected_results))
# step through each item and check
is_valid = True if row['Valid'].lower() == 'true' else False
for key in expected_map:
expected_result = unicode(expected_map[key])
function_to_test = getattr(self.p._grammar(key), row['Func'])
row_str = u"{}\t{}\t{}\t{}\t{}".format(row['Func'], key, row['Valid'], 'one', expected_result)
try:
actual_result = unicode(function_to_test())
if not is_valid or (expected_result != actual_result):
print( "expected: {} actual:{}".format(expected_result, actual_result) )
fail_cases.append(row_str)
except Exception as e:
if is_valid:
print( "expected: {} Exception: {}".format(expected_result, e) )
fail_cases.append(row_str)
# everything should have passed - report whatever failed
self.assertTrue(len(fail_cases) == 0, pprint.pprint(fail_cases))
def _split_inputs(self, in_string, intype):
DELIM = '|'
if intype == 'list':
inputs = in_string.split(DELIM)
elif intype == 'string':
inputs = list(in_string)
else: # intype == 'one'
inputs = [in_string]
inputs = [x if x != 'None' else None for x in inputs]
return inputs
if __name__ == '__main__':
unittest.main()
## <LICENSE>
## Copyright 2014 HGVS Contributors (https://bitbucket.org/hgvs/hgvs)
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
## </LICENSE>
| jmuhlich/hgvs | tests/test_hgvs_grammar_full.py | Python | apache-2.0 | 4,756 |
'''OpenGL extension VERSION.GL_1_5
This module customises the behaviour of the
OpenGL.raw.GL.VERSION.GL_1_5 to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/VERSION/GL_1_5.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.VERSION.GL_1_5 import *
### END AUTOGENERATED SECTION
from OpenGL.lazywrapper import lazy
from OpenGL.arrays import ArrayDatatype
glDeleteBuffers = arrays.setInputArraySizeType(
glDeleteBuffers,
None,
arrays.GLuintArray,
'buffers',
)
glGenBuffers = wrapper.wrapper( glGenBuffers ).setOutput(
'buffers', lambda n: (n,), 'n',
)
def _sizeOfArrayInput( pyArgs, index, wrapper ):
return (
arrays.ArrayDatatype.arrayByteCount( pyArgs[index] )
)
@lazy( glBufferData )
def glBufferData( baseOperation, target, size, data=None, usage=None ):
"""Copy given data into the currently bound vertex-buffer-data object
target -- the symbolic constant indicating which buffer type is intended
size -- if provided, the count-in-bytes of the array
data -- data-pointer to be used, may be None to initialize without
copying over a data-set
usage -- hint to the driver as to how to set up access to the buffer
Note: parameter "size" can be omitted, which makes the signature
glBufferData( target, data, usage )
instead of:
glBufferData( target, size, data, usage )
"""
if usage is None:
usage = data
data = size
size = None
data = ArrayDatatype.asArray( data )
if size is None:
size = ArrayDatatype.arrayByteCount( data )
return baseOperation( target, size, data, usage )
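# Usage sketch (assumes a buffer object is bound to GL_ARRAY_BUFFER and that
# `vertices` is a numpy float32 array; both are illustrative). Per the
# docstring above, the size argument may be omitted:
#   glBufferData(GL_ARRAY_BUFFER, vertices, GL_STATIC_DRAW)
#   glBufferData(GL_ARRAY_BUFFER, vertices.nbytes, vertices, GL_STATIC_DRAW)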
@lazy( glBufferSubData )
def glBufferSubData( baseOperation, target, offset, size, data=None ):
"""Copy subset of data into the currently bound vertex-buffer-data object
target -- the symbolic constant indicating which buffer type is intended
offset -- offset from beginning of buffer at which to copy bytes
size -- the count-in-bytes of the array (if an int/long), if None,
calculate size from data, if an array and data is None, use as
data (i.e. the parameter can be omitted and calculated)
data -- data-pointer to be used, may be None to initialize without
copying over a data-set
Note that if size is not an int/long it is considered to be data
"""
try:
if size is not None:
size = int( size )
except TypeError, err:
if data is not None:
raise TypeError(
"""Expect an integer size *or* a data-array, not both"""
)
data = size
size = None
data = ArrayDatatype.asArray( data )
if size is None:
size = ArrayDatatype.arrayByteCount( data )
return baseOperation( target, offset, size, data )
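# Usage sketch (same illustrative assumptions as above); the size argument
# may be omitted here too, in which case it is computed from the data:
#   glBufferSubData(GL_ARRAY_BUFFER, 0, vertices)
#   glBufferSubData(GL_ARRAY_BUFFER, 0, vertices.nbytes, vertices)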
glGetBufferParameteriv = wrapper.wrapper(glGetBufferParameteriv).setOutput(
"params",(1,),
)
@lazy( glGetBufferPointerv )
def glGetBufferPointerv( baseOperation, target, pname, params=None ):
"""Retrieve a ctypes pointer to buffer's data"""
if params is None:
size = glGetBufferParameteriv( target, GL_BUFFER_SIZE )
data = arrays.ArrayDatatype.zeros( (size,), GL_UNSIGNED_BYTE )
result = baseOperation( target, pname, ctypes.byref( data ) )
return data
else:
return baseOperation( target, pname, params )
@lazy( glDeleteQueries )
def glDeleteQueries( baseOperation, n, ids=None ):
if ids is None:
ids = arrays.GLuintArray.asArray( n )
n = arrays.GLuintArray.arraySize( ids )
else:
ids = arrays.GLuintArray.asArray( ids )
return baseOperation( n,ids )
@lazy( glGenQueries )
def glGenQueries( baseOperation, n, ids=None ):
"""Generate n queries, if ids is None, is allocated
returns array of ids
"""
if ids is None:
ids = arrays.GLuintArray.zeros( (n,))
else:
ids = arrays.GLuintArray.asArray( ids )
baseOperation( n, ids )
return ids
for func in (
'glGetQueryiv','glGetQueryObjectiv','glGetQueryObjectuiv',
):
globals()[func] = wrapper.wrapper(globals()[func]).setOutput(
"params", (1,)
)
try:
del func, glget
except NameError, err:
pass
| Universal-Model-Converter/UMC3.0a | data/Python/x86/Lib/site-packages/OpenGL/GL/VERSION/GL_1_5.py | Python | mit | 4,389 |
#!/usr/bin/env python
from fs.utils import movefile, movefile_non_atomic, contains_files
from fs.commands import fscp
import sys
class FSmv(fscp.FScp):
usage = """fsmv [OPTION]... [SOURCE] [DESTINATION]
Move files from SOURCE to DESTINATION"""
def get_verb(self):
return 'moving...'
def get_action(self):
if self.options.threads > 1:
return movefile_non_atomic
else:
return movefile
def post_actions(self):
for fs, dirpath in self.root_dirs:
if not contains_files(fs, dirpath):
fs.removedir(dirpath, force=True)
def run():
return FSmv().run()
if __name__ == "__main__":
sys.exit(run())
| GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/fs/commands/fsmv.py | Python | agpl-3.0 | 750 |
#!/usr/bin/env python3
# encoding: utf8
# license: ISC (MIT/BSD compatible) https://choosealicense.com/licenses/isc/
# This library is principally created for python 3. However python 2 support may be doable and is welcomed.
"""Use python in a more object oriented, saner and shorter way.
# WARNING
First: A word of warning. This library is an experiment. It is based on a wrapper that aggressively
wraps anything it comes in contact with and tries to stay invisible from then on (apart from adding methods).
However this means that this library is probably quite unsuitable for use in bigger projects. Why?
Because the wrapper will spread in your runtime image like a virus, 'infecting' more and more objects
causing strange side effects. That being said, this library is perfect for short scripts and especially
'one-off' shell commands. Use its power wisely!
# Introduction
This library is heavily inspired by jQuery and underscore / lodash in the javascript world. Or you
could say that it is inspired by Smalltalk and, by extension, Ruby and how they deal with collections
and how to work with them.
In JS the problem is that the standard library sucks very badly and is missing many of the
most important convenience methods. Python is better in this regard, in that it has (almost) all
those methods available somewhere. BUT: quite a lot of them are available on the wrong object or
are free methods where they really should be methods. Examples: `str.join` really should be on iterable.
`map`, `zip`, `filter` should really be on iterable. Part of this problem comes from the design
choice of the python language, to provide a strange kind of minimal duck typing interface with the __*__
methods that the free methods like `map`, `zip`, `filter` then use. This however has the unfortunate
side effect that writing python code using these methods often requires the reader to mentally skip
back and forth in a line to parse what it does. While this is not too bad for simple usage of these
functions, it becomes a nightmare if longer statements are built up from them.
Don't believe me? Try to parse this simple example as fast as you can:
>>> map(print, map(str.upper, sys.stdin.read().split('\n')))
How many times did you have to backtrack? To me, this code means finding out that it starts in the
middle at `sys.stdin.read().split('\n')`, then I have to backtrack to `map(str.upper, …)`, then to
`map(print, …)`. Then while writing, I have to make sure that the number of parens at the end are
correct, which is something I usually have to use editor support for as it's quite hard to accurately
identify where the matching paren is.
The problem with this? This is hard! Hard to write, as it doesn't follow the way I think about this
statement. Literally, this means I usually write these statements from the inside out and wrap them
using my editor as I write them. As demonstrated above, it's also hard to read - requireing quite a
bit of backtracking.
So, what's the problem you say? Just don't do it, it's not pythonic you say! Well, Python has two
main workarounds available for this mess. One is to use list comprehension / generator
statements like this:
>>> [print(line.upper()) for line in sys.stdin.read().split('\n')]
This is clearly better. Now you only have to skip back and forth once instead of twice. Yay! Win!
To me that is not a good workaround. Sure it's nice to easily be able to create generators this
way, but it still requires me to find where the statement starts and to backtrack to the beginning
to see what is happening. Oh, but they support filtering too!
>>> [print(line.upper()) for line in sys.stdin.read().split('\n') if line.upper().startswith('FNORD')]
Well, this is little better. For one thing, this doesn't solve the backtracking problem, but more
importantly, if the filtering has to be done on the processed version (here artificially on
`line.upper().startswith()`) then the operation has to be applied twice - which sucks
because you have to write it twice, but also because it is computed twice.
The solution? Nest them!
[print(line) for line in (line.upper() for line in sys.stdin.read().split('\n')) if line.startswith('FNORD')]
Do you start seeing the problem?
Compare it to this:
>>> for line in sys.stdin.read().split('\n'):
>>> uppercased = line.upper()
>>> if uppercased.startswith('FNORD'):
>>> print(uppercased)
Almost all my complaints are gone. It reads and writes almost completely in order it is computed.
Easy to read, easy to write - but one drawback. It's not an expression - it's a bunch of statements.
Which means that it's not easily combinable and abstractable with higher order methods or generators.
Also (to complain on a high level), you had to invent two variable names `line` and `uppercased`.
While that is not bad, especially if they explain what is going on - in this case it's not really
helping _and_ (drumroll) it requires some backtracking and buildup of mental state to read. Oh well.
Of course you can use explaining variables to untangle the mess of using higher order functions too:
Consider this code:
>>> cross_product_of_dependency_labels = \
>>> set(map(frozenset, itertools.product(*map(attrgetter('_labels'), dependencies))))
That certainly is hard to read (and write). Pulling out explaining variables makes it better. Like so:
>>> labels = map(attrgetter('_labels'), dependencies)
>>> cross_product_of_dependency_labels = set(map(frozenset, itertools.product(*labels)))
Better, but still hard to read. Sure, those explaining variables are nice and sometimes
essential to understanding the code - but it does take up space in lines, and space in my head
while parsing this code. The question would be - is this really easier to read than something
like this?
>>> cross_product_of_dependency_labels = _(dependencies) \
>>> .map(_.each._labels) \
>>> .star_call(itertools.product) \
>>> .map(frozenset) \
>>> .call(set)
Sure you are not used to this at first, but consider the advantages. The intermediate variable
names are abstracted away - the data flows through the methods completely naturally. No jumping
back and forth to parse this at all. It just reads and writes exactly in the order it is computed.
What I want to accomplish, I can write down directly in order. Oh, and I don't have
to keep track of extra closing parentheses at the end of the expression.
So what is the essence of all of this?
Python is an object oriented language - but it doesn't really use what object orientation has taught
us about how we can work with collections and higher order methods in the languages that came before it
(especially SmallTalk, but more recently also Ruby). Why can't I make those beautiful fluent call chains
that SmallTalk could do 20 years ago in Python today?
Well, now you can.
# Features
To enable this style of coding this library has some features that might not be so obvious at first.
## Aggressive (specialized) wrapping
The most important entry point for this library is the function `wrap` or the perhaps preferable and
shorter alias `_`:
>>> _(something)
>>> # or
>>> wrap(something)
`wrap` is a factory function that returns a subclass of Wrapper, the basic and main object of this library.
This does two things: First it ensures that every attribute access, item access or method call off of
the wrapped object will also return a wrapped object. This means that once you wrap something, unless
you unwrap it explicitly via `.unwrap` or `._` it stays wrapped - pretty much no matter what you do
with it. The second thing this does is that it returns a subclass of Wrapper that has a specialized set
of methods depending on the type of what is wrapped. I envision this to expand in the future, but right
now the most useful wrappers are: Iterable, where we add all the python collection functions (map,
filter, zip, reduce, …) as well as a good batch of methods from itertools and a few extras for good
measure; Callable, where we add `.curry()` and `.compose()`; and Text, where most of the regex methods
are added.
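For example (an illustrative sketch of one method from each specialized wrapper):
>>> _([1, 2, 3]).map(lambda each: each * 2) == (2, 4, 6)
>>> _('foo bar').findall(r'\w+') == ['foo', 'bar']
>>> _(operator.add).curry(1, 2)() == 3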
## Imports as expressions
Import statements are (ahem) statements in python. This is fine, but can be really annoying at times.
Consider this shell text filter written in python:
$ curl -sL 'https://www.iblocklist.com/lists.php' | egrep -A1 'star_[345]' | python3 -c "import sys, re; from xml.sax.saxutils import unescape; print('\n'.join(map(unescape, re.findall(r'value=\'(.*)\'', sys.stdin.read()))))"
Sure it has all the backtracking problems I talked about already. Using fluent this would already be much better.
$ curl -sL 'https://www.iblocklist.com/lists.php' \
| egrep -A1 'star_[345]' \
| python3 -c "from fluent import *; import sys, re; from xml.sax.saxutils import unescape; _(sys.stdin.read()).findall(r'value=\'(.*)\'').map(unescape).map(print)"
But this still leaves the problem that it has to start with this fluff
`from fluent import *; import sys, re; from xml.sax.saxutils import unescape;`
This doesn't really do anything to make it easier to read and write and is almost half the characters
it took to achieve the wanted effect. Wouldn't it be nice if you could have
some kind of object (let's call it `lib` for lack of a better word), where you could just access the whole
python library via attribute access and let its machinery handle importing behind the scenes?
Like this:
$ curl -sL 'https://www.iblocklist.com/lists.php' | egrep -A1 'star_[345]' | python3 -m fluent "lib.sys.stdin.read().findall(r'value=\'(.*)\'').map(lib.xml.sax.saxutils.unescape).map(print)"
How's that for reading and writing if all the imports are inlined? Oh, and of course everything imported
via `lib` comes already pre-wrapped, so your code becomes even shorter.
More formally: the `lib` object, which is a wrapper around the python import machinery, allows
anything that is reachable via import to be imported as an expression, for inline use.
So instead of
>>> import sys
>>> input = sys.stdin.read()
You can do
>>> input = lib.sys.stdin.read()
As a bonus, everything imported via lib is already pre-wrapped, so you can chain off of it immediately.
`lib` is also available on `_` which is itself just an alias for `wrap`. This is useful if you want
to import fewer symbols from fluent or want to import the library under a custom name
>>> from fluent import _ # alias for wrap
>>> _.lib.sys.stdin.split('\n').map(str.upper).map(print)
>>> from fluent import _ as fluent # alias for wrap
>>> fluent.lib.sys.stdin.split('\n').map(str.upper).map(print)
Not sure if that is so super useful though, as you could also just do:
>>> import fluent
>>> fluent.lib.sys.stdin.split('\n').map(str.upper).map(print)
## Generating lambda's from expressions
`lambda` is great - it's often exactly what the doctor ordered. But it can also be a bit annoying
if you have to write it down every time you just want to get an attribute or call a method on every
object in a collection.
>>> _([dict(fnord='foo'), dict(fnord='bar')]).map(lambda each: each['fnord']) == ['foo', 'bar']
>>> class Foo(object):
>>> attr = 'attrvalue'
>>> def method(self, arg): return 'method+'+arg
>>> _([Foo(), Foo()]).map(lambda each: each.attr) == ['attrvalue', 'attrvalue']
>>> _([Foo(), Foo()]).map(lambda each: each.method('arg')) == ['method+arg', 'method+arg']
Sure it works, but wouldn't it be nice if we could save a variable and do this a bit shorter?
I mean, python does have attrgetter, itemgetter and methodcaller - they are just a bit
inconvenient to use:
>>> from operator import itemgetter, attrgetter, methodcaller
>>> _([dict(fnord='foo'), dict(fnord='bar')]).map(itemgetter('fnord')) == ['foo', 'bar']
>>> class Foo(object):
>>> attr = 'attrvalue'
>>> def method(self, arg): return 'method+'+arg
>>> _([Foo(), Foo()]).map(attrgetter(attr)) == ['attrvalue', 'attrvalue']
>>> _([Foo(), Foo()]).map(methodcaller(method, 'arg')) == ['method+arg', 'method+arg']
So there is an object `_.each` that just exposes a bit of syntactic sugar for these
(and a few operators). Basically, everything you do to `_.each` it will do to each object
in the collection:
>>> _([1,2,3]).map(_.each + 3) == [4,5,6]
>>> _([1,2,3]).filter(_.each < 3) == [1,2]
>>> _([1,2,3]).map(- _.each) == [-1,-2,-3]
>>> _([dict(fnord='foo'), dict(fnord='bar')]).map(_.each['fnord']) == ['foo', 'bar']
>>> class Foo(object):
>>> attr = 'attrvalue'
>>> def method(self, arg): return 'method+'+arg
>>> _([Foo(), Foo()]).map(_.each.attr) == ['attrvalue', 'attrvalue']
>>> _([Foo(), Foo()]).map(_.each.call.method('arg')) == ['method+arg', 'method+arg']
Yeah I know `_.each.call.*()` is crude - but I haven't found a good syntax to get rid of
the .call yet. Feedback welcome.
## Chaining off of methods that return None
A major nuisance for fluent interfaces is methods that return None. Now this is mostly
a feature of python, where methods that don't have a return statement return None.
While this is way better than e.g. Ruby where that will just return the value of the last
expression - which means objects constantly leak internals, it is very annoying if you want to
chain off of one of these method calls. Fear not though, fluent has you covered. :)
Fluent wrapped objects will behave more like SmallTalk objects, in that they pretend
that every method that returns None actually returned self - thus allowing chaining. So this just works:
>>> _([3,2,1]).sort().reverse().call(print)
Even though both sort() and reverse() return None.
Of course, if you unwrap at any point with `.unwrap` or `._` you will get the true return value of `None`.
# Famous Last Words
This library tries to do a little of what underscore does for javascript: provide the missing
glue to make the standard library nicer and easier to use - especially for short one-liners or
short scripts. Have fun!
While I know that this is not something you want to use in big projects (see warning at the beginning)
I envision this to be very useful in quick python scripts and shell one-liner filters, where
python was previously just that little bit too hard to use that it 'overflowed the barrel' and
prevented you from doing so.
"""
"""Future Ideas:
# TODO consider numeric type to do stuff like wrap(3).times(...)
or wrap([1,2,3]).call(len).times(yank_me)
Rework _.each.call.foo(bar) so 'call' is no longer a used-up symbol on each.
Also _.each.call.method(...) has a somewhat different meaning as the .call method on callable
could _.each.method(_, ...) work when auto currying is enabled?
Rework fluent so explicit unwrapping is required to do anything with wrapped objects.
(Basically calling ._ at the end)
The idea here is that this would likely enable the library to be used in big / bigger
projects as it loses its virus-like qualities.
* Maybe this is best done as a separate import?
* This would also be a chance to consider always using the iterator versions of
all the collection methods under their original name and automatically unpacking
/ triggering the iteration on ._? Not sure that's a great idea, as getting the
iterator to abstract over it is a) great and b) triggering the iteration is also
hard, see e.g. groupby.
* This would require careful analysis of where wrapped objects are handed out as arguments
to called methods e.g. .tee(). Also requires __repr__ and __str__ implementations that
make sense.
Roundable (for all numeric needs?)
round, times, repeat, if_true, if_false, else_
if_true, etc. are pretty much like conditional versions of .tee() I guess.
.if_true(function_to_call).else_(other_function_to_call)
"""
# REFACT rename wrap -> fluent? perhaps as an alias?
__all__ = [
'wrap', # generic wrapper factory that returns the appropriate subclass in this package according to what is wrapped
'_', # _ is an alias for wrap
'lib', # wrapper for python import machinery, access every importable package / function directly on this via attribute access
]
import typing
import re
import math
import types
import functools
import itertools
import operator
import collections.abc
def wrap(wrapped, *, previous=None, chain=None):
"""Factory method, wraps anything and returns the appropriate Wrapper subclass.
This is the main entry point into the fluent wonderland. Wrap something and
    everything you call off of that will stay wrapped in the appropriate wrappers.
"""
if isinstance(wrapped, Wrapper):
return wrapped
by_type = (
(types.ModuleType, Module),
(typing.Text, Text),
(typing.Mapping, Mapping),
(typing.AbstractSet, Set),
(typing.Iterable, Iterable),
(typing.Callable, Callable),
)
if wrapped is None and chain is None and previous is not None:
chain = previous.chain
decider = wrapped
if wrapped is None and chain is not None:
decider = chain
for clazz, wrapper in by_type:
if isinstance(decider, clazz):
return wrapper(wrapped, previous=previous, chain=chain)
return Wrapper(wrapped, previous=previous, chain=chain)
# sadly _ is pretty much the only valid python identifier that is symbolic and easy to type. Unicode symbols would also be candidates, but are hard to type; $ and § (as used in js) cannot be used in python identifiers
_ = wrap
def wrapped(wrapped_function, additional_result_wrapper=None, self_index=0):
"""
Using these decorators will take care of unwrapping and rewrapping the target object.
    Thus all following code is written as if the methods live on the wrapped object.
    Also perfect for adapting free functions as instance methods.
"""
@functools.wraps(wrapped_function)
def wrapper(self, *args, **kwargs):
result = wrapped_function(*args[0:self_index], self.chain, *args[self_index:], **kwargs)
if callable(additional_result_wrapper):
result = additional_result_wrapper(result)
return wrap(result, previous=self)
return wrapper
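# For example (sketch): this is exactly how the proxied methods on Wrapper are
# built below - `getattr = wrapped(getattr)` turns the free function getattr
# into a method that operates on the wrapped object and rewraps its result.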
def unwrapped(wrapped_function):
"""Like wrapped(), but doesn't wrap the result.
Use this to adapt free functions that should not return a wrapped value"""
@functools.wraps(wrapped_function)
def forwarder(self, *args, **kwargs):
return wrapped_function(self.chain, *args, **kwargs)
return forwarder
def wrapped_forward(wrapped_function, additional_result_wrapper=None, self_index=1):
"""Forwards a call to a different object
    This makes the forwarded function available as a method on the wrapper.
    This specifically models the case where the function forwarded to
    takes the current object as its first argument.
This also deals nicely with methods that just live on the wrong object.
"""
return wrapped(wrapped_function, additional_result_wrapper=additional_result_wrapper, self_index=self_index)
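# For example (sketch): Text.findall below is built with wrapped_forward, so
# _('foo bar').findall(r'\w+') forwards to re.findall(r'\w+', 'foo bar').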
def tupleize(wrapped_function):
""""Wrap the returned obect in a tuple to force execution of iterators.
Especially usefull to de-iterate methods / function
"""
@functools.wraps(wrapped_function)
def wrapper(self, *args, **kwargs):
return wrap(tuple(wrapped_function(self, *args, **kwargs)), previous=self)
return wrapper
class Wrapper(object):
"""Universal wrapper.
This class ensures that all function calls and attribute accesses
that can be caught in python will be wrapped with the wrapper again.
This ensures that the fluent interface will persist and everything
    that is returned is itself able to be chained from again.
    Using this wrapper changes the behaviour of python source code in quite a big way.
a) If you wrap something, if you want to get at the real object from any
function call or attribute access off of that object, you will have to
explicitly unwrap it.
b) All returned objects will be enhanced by behaviour that matches the
wrapped type. I.e. iterables will gain the collection interface,
mappings will gain the mapping interface, strings will gain the
string interface, etc.
"""
def __init__(self, wrapped, *, previous, chain):
assert wrapped is not None or chain is not None, 'Cannot chain off of None'
self.__wrapped = wrapped
self.__previous = previous
self.__chain = chain
# Proxied methods
__getattr__ = wrapped(getattr)
__getitem__ = wrapped(operator.getitem)
def __str__(self):
return "fluent.wrap(%s)" % self.chain
def __repr__(self):
return "fluent.wrap(%r)" % self.chain
    # REFACT consider whether I want to support all other operators too or whether explicit
    # unwrapping is actually a better thing
__eq__ = unwrapped(operator.eq)
# Breakouts
@property
def unwrap(self):
return self.__wrapped
_ = unwrap # alias
@property
def previous(self):
return self.__previous
@property
def chain(self):
"Like .unwrap but handles chaining off of methods / functions that return None like SmallTalk does"
if self.unwrap is not None:
return self.unwrap
return self.__chain
# Utilities
@wrapped
def call(self, function, *args, **kwargs):
"Call function with self as first argument"
# Different from __call__! Calls function(self, …) instead of self(…)
return function(self, *args, **kwargs)
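    # e.g. (sketch): _([1, 2, 3]).call(min) == 1; min receives the plain list,
    # because the @wrapped decorator above unwraps self before the call.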
setattr = wrapped(setattr)
getattr = wrapped(getattr)
hasattr = wrapped(hasattr)
delattr = wrapped(delattr)
isinstance = wrapped(isinstance)
issubclass = wrapped(issubclass)
def tee(self, function):
"""Like tee on the shell
Calls the argument function with self, but then discards the result and allows
further chaining from self."""
function(self)
return self
dir = wrapped(dir)
vars = wrapped(vars)
# REFACT consider using wrap as the placeholder to have fewer symbols? Probably not worth it...
virtual_root_module = object()
class Module(Wrapper):
"""Importer shortcut.
All attribute accesses to instances of this class are converted to
an import statement, but as an expression that returns the wrapped imported object.
Example:
>>> lib.sys.stdin.read().map(print)
Is equivalent to
>>> import importlib
>>> wrap(importlib.import_module('sys').stdin).read().map(print)
But of course without creating the intermediate symbol 'stdin' in the current namespace.
All objects returned from lib are pre-wrapped, so you can chain off of them immediately.
"""
def __getattr__(self, name):
if hasattr(self.chain, name):
return wrap(getattr(self.chain, name))
import importlib
module = None
if self.chain is virtual_root_module:
module = importlib.import_module(name)
else:
module = importlib.import_module('.'.join((self.chain.__name__, name)))
return wrap(module)
wrap.lib = lib = Module(virtual_root_module, previous=None, chain=None)
class Callable(Wrapper):
def __call__(self, *args, **kwargs):
""""Call through with a twist.
If one of the args is `wrap` / `_`, then this acts as a shortcut to curry instead"""
        # REFACT consider dropping the auto curry - doesn't look like it is so super useful
# REFACT Consider how to expand this so every method in the library supports auto currying
if wrap in args:
return self.curry(*args, **kwargs)
result = self.chain(*args, **kwargs)
chain = None if self.previous is None else self.previous.chain
return wrap(result, previous=self, chain=chain)
# REFACT rename to partial for consistency with stdlib?
    # REFACT consider if there could be more utility in supporting placeholders for more use cases.
# examples:
# Switching argument order?
@wrapped
def curry(self, *curry_args, **curry_kwargs):
""""Like functools.partial, but with a twist.
If you use `wrap` or `_` as a positional argument, upon the actual call,
arguments will be left-filled for those placeholders.
For example:
>>> _(operator.add).curry(_, 'foo')('bar') == 'barfoo'
"""
placeholder = wrap
def merge_args(curried_args, args):
            assert curried_args.count(placeholder) == len(args), \
                'Need the right number of arguments for the placeholders'
new_args = list(curried_args)
if placeholder in curried_args:
index = 0
for arg in args:
index = new_args.index(placeholder, index)
new_args[index] = arg
return new_args
@functools.wraps(self)
def wrapper(*actual_args, **actual_kwargs):
return self(
*merge_args(curry_args, actual_args),
**dict(curry_kwargs, **actual_kwargs)
)
return wrapper
@wrapped
def compose(self, outer):
return lambda *args, **kwargs: outer(self(*args, **kwargs))
    # REFACT consider aliases wrap = chain = cast = compose
class Iterable(Wrapper):
"""Add iterator methods to any iterable.
    Most collection functions in python3 return an iterator by default, which is very interesting
if you want to build efficient processing pipelines, but not so hot for quick and
dirty scripts where you have to wrap the result in a list() or tuple() all the time
to actually get at the results (e.g. to print them) or to actually trigger the
computation pipeline.
    Thus all iterator methods on this class are immediate by default, i.e. they don't return the
iterator but instead consume it immediately and return a tuple. Of course if needed,
there is also an i{map,zip,enumerate,...} version for your enjoyment that returns the
iterator.
"""
__iter__ = unwrapped(iter)
@wrapped
def star_call(self, function, *args, **kwargs):
"Calls function(*self), but allows to prepend args and add kwargs."
return function(*args, *self, **kwargs)
# This looks like it should be the same as
# starcall = wrapped(lambda function, wrapped, *args, **kwargs: function(*wrapped, *args, **kwargs))
# but it's not. Why?
@wrapped
def join(self, with_what):
""""Like str.join, but the other way around. Bohoo!
Also calls str on all elements of the collection before handing
it off to str.join as a convenience.
"""
return with_what.join(map(str, self))
## Reductors .........................................
len = wrapped(len)
max = wrapped(max)
min = wrapped(min)
sum = wrapped(sum)
any = wrapped(any)
all = wrapped(all)
reduce = wrapped_forward(functools.reduce)
## Iterators .........................................
imap = wrapped_forward(map)
map = tupleize(imap)
istar_map = istarmap = wrapped_forward(itertools.starmap)
star_map = starmap = tupleize(istarmap)
ifilter = wrapped_forward(filter)
filter = tupleize(ifilter)
ienumerate = wrapped(enumerate)
enumerate = tupleize(ienumerate)
ireversed = wrapped(reversed)
reversed = tupleize(ireversed)
isorted = wrapped(sorted)
sorted = tupleize(isorted)
@wrapped
def igrouped(self, group_length):
"s -> (s0,s1,s2,...sn-1), (sn,sn+1,sn+2,...s2n-1), (s2n,s2n+1,s2n+2,...s3n-1), ..."
return zip(*[iter(self)]*group_length)
grouped = tupleize(igrouped)
izip = wrapped(zip)
zip = tupleize(izip)
@wrapped
def iflatten(self, level=math.inf):
"Modeled after rubys array.flatten @see http://ruby-doc.org/core-1.9.3/Array.html#method-i-flatten"
for element in self:
if level > 0 and isinstance(element, typing.Iterable):
for subelement in _(element).iflatten(level=level-1):
yield subelement
else:
yield element
return
flatten = tupleize(iflatten)
igroupby = wrapped(itertools.groupby)
def groupby(self, *args, **kwargs):
# Need an extra wrapping function to consume the deep iterators in time
result = []
for key, values in self.igroupby(*args, **kwargs):
result.append((key, tuple(values)))
return wrap(tuple(result))
def tee(self, function):
"This override tries to retain iterators, as a speedup"
if hasattr(self.chain, '__next__'): # iterator
first, second = itertools.tee(self.chain, 2)
function(wrap(first, previous=self))
return wrap(second, previous=self)
else:
return super().tee(function)
class Mapping(Iterable):
def __getattr__(self, name):
"Support JavaScript like dict item access via attribute access"
if name in self.chain:
return self[name]
        return super().__getattr__(name)
@wrapped
def star_call(self, function, *args, **kwargs):
"Calls function(**self), but allows to add args and set defaults for kwargs."
return function(*args, **dict(kwargs, **self))
class Set(Iterable): pass
# REFACT consider inheriting from Iterable? It's how Python works...
class Text(Wrapper):
"Supports most of the regex methods as if they where native str methods"
# Regex Methods ......................................
search = wrapped_forward(re.search)
match = wrapped_forward(re.match)
    fullmatch = wrapped_forward(re.fullmatch)
split = wrapped_forward(re.split)
findall = wrapped_forward(re.findall)
# REFACT consider ifind and find in the spirit of the collection methods?
finditer = wrapped_forward(re.finditer)
sub = wrapped_forward(re.sub, self_index=2)
subn = wrapped_forward(re.subn, self_index=2)
def make_operator(name):
__op__ = getattr(operator, name)
@functools.wraps(__op__)
def wrapper(self, *others):
return wrap(__op__).curry(wrap, *others)
return wrapper
class Each(Wrapper):
for name in dir(operator):
if not name.startswith('__'):
continue
locals()[name] = make_operator(name)
@wrapped
def __getattr__(self, name):
return operator.attrgetter(name)
@wrapped
def __getitem__(self, index):
return operator.itemgetter(index)
@property
def call(self):
class MethodCallerConstructor(object):
_method_name = None
def __getattr__(self, method_name):
self._method_name = method_name
return self
def __call__(self, *args, **kwargs):
assert self._method_name is not None, \
'Need to access the method to call first! E.g. _.each.call.method_name(arg1, kwarg="arg2")'
return wrap(operator.methodcaller(self._method_name, *args, **kwargs))
return MethodCallerConstructor()
each_marker = object()
wrap.each = Each(each_marker, previous=None, chain=None)
import unittest
from pyexpect import expect
import pytest
class FluentTest(unittest.TestCase): pass
class WrapperTest(FluentTest):
def test_should_not_wrap_a_wrapper_again(self):
wrapped = _(4)
expect(type(_(wrapped).unwrap)) == int
    def test_should_provide_useful_str_and_repr_output(self):
expect(repr(_('foo'))) == "fluent.wrap('foo')"
expect(str(_('foo'))) == "fluent.wrap(foo)"
def test_should_wrap_callables(self):
counter = [0]
def foo(): counter[0] += 1
expect(_(foo)).is_instance(Wrapper)
_(foo)()
expect(counter[0]) == 1
def test_should_wrap_attribute_accesses(self):
class Foo(): bar = 'baz'
expect(_(Foo()).bar).is_instance(Wrapper)
def test_should_wrap_item_accesses(self):
expect(_(dict(foo='bar'))['foo']).is_instance(Wrapper)
def test_should_error_when_accessing_missing_attribute(self):
class Foo(): pass
expect(lambda: _(Foo().missing)).to_raise(AttributeError)
    def test_should_explicitly_unwrap(self):
foo = 1
expect(_(foo).unwrap).is_(foo)
def test_should_wrap_according_to_returned_type(self):
expect(_('foo')).is_instance(Text)
expect(_([])).is_instance(Iterable)
expect(_(iter([]))).is_instance(Iterable)
expect(_({})).is_instance(Mapping)
expect(_({1})).is_instance(Set)
expect(_(lambda: None)).is_instance(Callable)
class CallMe(object):
def __call__(self): pass
expect(_(CallMe())).is_instance(Callable)
expect(_(object())).is_instance(Wrapper)
def test_should_remember_call_chain(self):
def foo(): return 'bar'
expect(_(foo)().unwrap) == 'bar'
expect(_(foo)().previous.unwrap) == foo
def test_should_delegate_equality_test_to_wrapped_instance(self):
# REFACT makes these tests much nicer - but probably has to go to make this library less virus like
expect(_(1)) == 1
expect(_('42')) == '42'
callme = lambda: None
expect(_(callme)) == callme
def test_hasattr_getattr_setattr_delattr(self):
expect(_((1,2)).hasattr('len'))
expect(_('foo').getattr('__len__')()) == 3
class Attr(object):
def __init__(self): self.foo = 'bar'
expect(_(Attr()).setattr('foo', 'baz').foo) == 'baz'
expect(_(Attr()).delattr('foo').unwrap) == None
expect(_(Attr()).delattr('foo').chain).isinstance(Attr)
expect(_(Attr()).delattr('foo').vars()) == {}
def test_isinstance_issubclass(self):
expect(_('foo').isinstance(str)) == True
expect(_('foo').isinstance(int)) == False
expect(_(str).issubclass(object)) == True
expect(_(str).issubclass(str)) == True
expect(_(str).issubclass(int)) == False
def test_dir_vars(self):
expect(_(object()).dir()).contains('__class__', '__init__', '__eq__')
class Foo(object): pass
foo = Foo()
foo.bar = 'baz'
expect(_(foo).vars()) == {'bar': 'baz'}
class CallableTest(FluentTest):
def test_call(self):
expect(_(lambda: 3)()) == 3
expect(_(lambda *x: x)(1,2,3)) == (1,2,3)
expect(_(lambda x=3: x)()) == 3
expect(_(lambda x=3: x)(x=4)) == 4
expect(_(lambda x=3: x)(4)) == 4
def test_star_call(self):
expect(wrap([1,2,3]).star_call(str.format, '{} - {} : {}')) == '1 - 2 : 3'
def test_should_call_callable_with_wrapped_as_first_argument(self):
expect(_([1,2,3]).call(min)) == 1
expect(_([1,2,3]).call(min)) == 1
expect(_('foo').call(str.upper)) == 'FOO'
expect(_('foo').call(str.upper)) == 'FOO'
def test_tee_breakout_a_function_with_side_effects_and_disregard_return_value(self):
side_effect = {}
def observer(a_list): side_effect['tee'] = a_list.join('-')
expect(_([1,2,3]).tee(observer)) == [1,2,3]
expect(side_effect['tee']) == '1-2-3'
def fnording(ignored): return 'fnord'
expect(_([1,2,3]).tee(fnording)) == [1,2,3]
def test_curry(self):
expect(_(lambda x, y: x*y).curry(2, 3)()) == 6
expect(_(lambda x=1, y=2: x*y).curry(x=3)()) == 6
def test_auto_currying(self):
expect(_(lambda x: x + 3)(_)(3)) == 6
expect(_(lambda x, y: x + y)(_, 'foo')('bar')) == 'barfoo'
expect(_(lambda x, y: x + y)('foo', _)('bar')) == 'foobar'
def test_curry_should_support_placeholders_to_curry_later_positional_arguments(self):
expect(_(operator.add).curry(_, 'foo')('bar')) == 'barfoo'
expect(_(lambda x, y, z: x + y + z).curry(_, 'baz', _)('foo', 'bar')) == 'foobazbar'
# expect(_(operator.add).curry(_2, _1)('foo', 'bar')) == 'barfoo'
def test_compose_cast_wraps_chain(self):
expect(_(lambda x: x*2).compose(lambda x: x+3)(5)) == 13
expect(_(str.strip).compose(str.capitalize)(' fnord ')) == 'Fnord'
class SmallTalkLikeBehaviour(FluentTest):
def test_should_pretend_methods_that_return_None_returned_self(self):
expect(_([3,2,1]).sort().unwrap) == None
expect(_([3,2,1]).sort().previous.previous) == [1,2,3]
expect(_([3,2,1]).sort().chain) == [1,2,3]
expect(_([2,3,1]).sort().sort(reverse=True).unwrap) == None
expect(_([2,3,1]).sort().sort(reverse=True).previous.previous.previous.previous) == [3,2,1]
expect(_([2,3,1]).sort().sort(reverse=True).chain) == [3,2,1]
def test_should_chain_off_of_previous_if_our_functions_return_none(self):
class Attr(object):
foo = 'bar'
expect(_(Attr()).setattr('foo', 'baz').foo) == 'baz'
# TODO check individually that the different forms of wrapping behave according to the SmallTalk contract
# wrapped
# unwrapped
# wrapped_forward
class IterableTest(FluentTest):
def test_should_call_callable_with_star_splat_of_self(self):
expect(_([1,2,3]).star_call(lambda x, y, z: z-x-y)) == 0
def test_join(self):
expect(_(['1','2','3']).join(' ')) == '1 2 3'
expect(_([1,2,3]).join(' ')) == '1 2 3'
def test_any(self):
expect(_((True, False)).any()) == True
expect(_((False, False)).any()) == False
def test_all(self):
expect(_((True, False)).all()) == False
expect(_((True, True)).all()) == True
def test_len(self):
expect(_((1,2,3)).len()) == 3
def test_min_max_sum(self):
expect(_([1,2]).min()) == 1
expect(_([1,2]).max()) == 2
expect(_((1,2,3)).sum()) == 6
def test_map(self):
expect(_([1,2,3]).imap(lambda x: x * x).call(list)) == [1, 4, 9]
expect(_([1,2,3]).map(lambda x: x * x)) == (1, 4, 9)
def test_starmap(self):
expect(_([(1,2), (3,4)]).istarmap(lambda x, y: x+y).call(list)) == [3, 7]
expect(_([(1,2), (3,4)]).starmap(lambda x, y: x+y)) == (3, 7)
def test_filter(self):
expect(_([1,2,3]).ifilter(lambda x: x > 1).call(list)) == [2,3]
expect(_([1,2,3]).filter(lambda x: x > 1)) == (2,3)
def test_zip(self):
expect(_((1,2)).izip((3,4)).call(tuple)) == ((1, 3), (2, 4))
expect(_((1,2)).izip((3,4), (5,6)).call(tuple)) == ((1, 3, 5), (2, 4, 6))
expect(_((1,2)).zip((3,4))) == ((1, 3), (2, 4))
expect(_((1,2)).zip((3,4), (5,6))) == ((1, 3, 5), (2, 4, 6))
def test_reduce(self):
# no iterator version of reduce as it's not a mapping
expect(_((1,2)).reduce(operator.add)) == 3
def test_grouped(self):
expect(_((1,2,3,4,5,6)).igrouped(2).call(list)) == [(1,2), (3,4), (5,6)]
expect(_((1,2,3,4,5,6)).grouped(2)) == ((1,2), (3,4), (5,6))
expect(_((1,2,3,4,5)).grouped(2)) == ((1,2), (3,4))
def test_group_by(self):
actual = {}
for key, values in _((1,1,2,2,3,3)).igroupby():
actual[key] = tuple(values)
expect(actual) == {
1: (1,1),
2: (2,2),
3: (3,3)
}
actual = {}
for key, values in _((1,1,2,2,3,3)).groupby():
actual[key] = tuple(values)
expect(actual) == {
1: (1,1),
2: (2,2),
3: (3,3)
}
def test_tee_should_not_break_iterators(self):
        # This should work because the extend as well as the .call(list)
# should not exhaust the iterator created by .imap()
recorder = []
def record(generator): recorder.extend(generator)
expect(_([1,2,3]).imap(lambda x: x*x).tee(record).call(list)) == [1,4,9]
expect(recorder) == [1,4,9]
def test_enumerate(self):
expect(_(('foo', 'bar')).ienumerate().call(list)) == [(0, 'foo'), (1, 'bar')]
expect(_(('foo', 'bar')).enumerate()) == ((0, 'foo'), (1, 'bar'))
def test_reversed_sorted(self):
expect(_([2,1,3]).ireversed().call(list)) == [3,1,2]
expect(_([2,1,3]).reversed()) == (3,1,2)
expect(_([2,1,3]).isorted().call(list)) == [1,2,3]
expect(_([2,1,3]).sorted()) == (1,2,3)
expect(_([2,1,3]).isorted(reverse=True).call(list)) == [3,2,1]
expect(_([2,1,3]).sorted(reverse=True)) == (3,2,1)
def test_flatten(self):
expect(_([(1,2),[3,4],(5, [6,7])]).iflatten().call(list)) == \
[1,2,3,4,5,6,7]
expect(_([(1,2),[3,4],(5, [6,7])]).flatten()) == \
(1,2,3,4,5,6,7)
expect(_([(1,2),[3,4],(5, [6,7])]).flatten(level=1)) == \
(1,2,3,4,5,[6,7])
class MappingTest(FluentTest):
def test_should_call_callable_with_double_star_splat_as_keyword_arguments(self):
def foo(*, foo): return foo
expect(_(dict(foo='bar')).star_call(foo)) == 'bar'
expect(_(dict(foo='baz')).star_call(foo, foo='bar')) == 'baz'
expect(_(dict()).star_call(foo, foo='bar')) == 'bar'
def test_should_support_attribute_access_to_mapping_items(self):
expect(_(dict(foo='bar')).foo) == 'bar'
class StrTest(FluentTest):
def test_search(self):
expect(_('foo bar baz').search(r'b.r').span()) == (4,7)
def test_match_fullmatch(self):
expect(_('foo bar').match(r'foo\s').span()) == (0, 4)
expect(_('foo bar').fullmatch(r'foo\sbar').span()) == (0, 7)
def test_split(self):
expect(_('foo\nbar\nbaz').split(r'\n')) == ['foo', 'bar', 'baz']
expect(_('foo\nbar/baz').split(r'[\n/]')) == ['foo', 'bar', 'baz']
def test_findall_finditer(self):
expect(_("bazfoobar").findall('ba[rz]')) == ['baz', 'bar']
expect(_("bazfoobar").finditer('ba[rz]').map(_.each.call.span())) == ((0,3), (6,9))
def test_sub_subn(self):
expect(_('bazfoobar').sub(r'ba.', 'foo')) == 'foofoofoo'
expect(_('bazfoobar').sub(r'ba.', 'foo', 1)) == 'foofoobar'
expect(_('bazfoobar').sub(r'ba.', 'foo', count=1)) == 'foofoobar'
class ImporterTest(FluentTest):
def test_import_top_level_module(self):
import sys
expect(lib.sys) == sys
def test_import_symbol_from_top_level_module(self):
import sys
expect(lib.sys.stdin) == sys.stdin
def test_import_submodule_that_is_also_a_symbol_in_the_parent_module(self):
import os
expect(lib.os.name) == os.name
expect(lib.os.path.join) == os.path.join
def test_import_submodule_that_is_not_a_symbol_in_the_parent_module(self):
import dbm
expect(lambda: dbm.dumb).to_raise(AttributeError)
def delayed_import():
import dbm.dumb
return dbm.dumb
expect(lib.dbm.dumb) == delayed_import()
def test_imported_objects_are_pre_wrapped(self):
        expect(lib.os.path.join('/foo', 'bar', 'baz').findall(r'/(\w*)')) == ['foo', 'bar', 'baz']
class EachTest(FluentTest):
def test_should_produce_attrgetter_on_attribute_access(self):
class Foo(object):
bar = 'baz'
expect(_([Foo(), Foo()]).map(_.each.bar)) == ('baz', 'baz')
def test_should_produce_itemgetter_on_item_access(self):
expect(_([['foo'], ['bar']]).map(_.each[0])) == ('foo', 'bar')
def test_should_produce_callable_on_binary_operator(self):
expect(_(['foo', 'bar']).map(_.each == 'foo')) == (True, False)
expect(_([3, 5]).map(_.each + 3)) == (6, 8)
expect(_([3, 5]).map(_.each < 4)) == (True, False)
def test_should_produce_callable_on_unary_operator(self):
expect(_([3, 5]).map(- _.each)) == (-3, -5)
expect(_([3, 5]).map(~ _.each)) == (-4, -6)
def test_should_produce_methodcaller_on_call_attribute(self):
# problem: _.each.call is now not an attrgetter
# _.each.method.call('foo') # like a method chaining
# _.each_call.method('foo')
# _.eachcall.method('foo')
class Tested(object):
def method(self, arg): return 'method+'+arg
expect(_(Tested()).call(_.each.call.method('argument'))) == 'method+argument'
expect(lambda: _.each.call('argument')).to_raise(AssertionError, '_.each.call.method_name')
class IntegrationTest(FluentTest):
    def test_extract_and_decode_URIs(self):
from xml.sax.saxutils import unescape
line = '''<td><img src='/sitefiles/star_5.png' height='15' width='75' alt=''></td>
<td><input style='width:200px; outline:none; border-style:solid; border-width:1px; border-color:#ccc;' type='text' id='ydxerpxkpcfqjaybcssw' readonly='readonly' onClick="select_text('ydxerpxkpcfqjaybcssw');" value='http://list.iblocklist.com/?list=ydxerpxkpcfqjaybcssw&fileformat=p2p&archiveformat=gz'></td>'''
actual = _(line).findall(r'value=\'(.*)\'').imap(unescape).call(list)
expect(actual) == ['http://list.iblocklist.com/?list=ydxerpxkpcfqjaybcssw&fileformat=p2p&archiveformat=gz']
def test_call_module_from_shell(self):
from subprocess import check_output
output = check_output(
['python', '-m', 'fluent', "lib.sys.stdin.read().split('\\n').imap(str.upper).imap(print).call(list)"],
input=b'foo\nbar\nbaz')
expect(output) == b'FOO\nBAR\nBAZ\n'
if __name__ == '__main__':
import sys
assert len(sys.argv) == 2, \
"Usage: python -m fluent 'some code that can access fluent functions without having to import them'"
exec(sys.argv[1], dict(wrap=wrap, _=_, lib=lib))
| dwt/BayesianNetworks | fluent.py | Python | mit | 46,253 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-16 14:36
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('movies', '0021_auto_20161216_1406'),
]
operations = [
migrations.RenameField(
model_name='movie',
old_name='tmdb_id_not',
new_name='search',
),
]
| pdevetto/super-duper-disco | movies/migrations/0022_auto_20161216_1436.py | Python | gpl-3.0 | 429 |
def get_description():
"""Return random veather. Just like the pros"""
from random import choice
possibilities = ['rain', 'snow', 'sleet', 'fog',
'sun', 'who knows']
return choice(possibilities) | serggrom/python-projects | report.py | Python | gpl-3.0 | 231 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import os
import json
SPLIT_CHAR = '#'
MSG_SPLIT_CHAR = '@'
MIN_BETWEEN_BEFORE_AFTER = 1
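# Each input line is expected to look like (an assumption inferred from the
# parsing below): <name>@{"ts": "<timestamp in 100 ns units>", ...}
# e.g. OPERATION@{"ts": "131021760000000000", ...}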
if len(sys.argv) < 3:
print "引数が足りない(<infile1> <infile2>)"
sys.exit(1)
in_file_1 = sys.argv[1]
in_file_2 = sys.argv[2]
out_file = "combined_file.txt"
wf = open(out_file, 'w')
last_ts = 0
with open(in_file_1) as rf:
last_line = ""
for line in rf:
wf.write(line)
last_line = line
_,op_line = last_line.split(MSG_SPLIT_CHAR)
op_dict = json.loads(op_line)
last_ts = long(op_dict['ts'])
rf = open(in_file_2, 'r')
# Leave a gap of MIN_BETWEEN_BEFORE_AFTER seconds (x * 10^7 units of 100 ns) between the two files
currentTs = 0
exCurrentTs = 0
newCurrentTs = last_ts + MIN_BETWEEN_BEFORE_AFTER * (10 ** 7)
firstOperation = True
for line in rf:
line = line.replace('\n', '')
if line == '': continue
#print line
op_name,param = line.split(MSG_SPLIT_CHAR)
if op_name == "OPERATION":
op_dict = json.loads(param)
ts_str = op_dict["ts"]
currentTs = long(op_dict["ts"])
diffTs = currentTs - exCurrentTs
if firstOperation:
diffTs = 0
firstOperation = False
newCurrentTs += diffTs
line = line.replace(ts_str, str(newCurrentTs))
wf.write(line + "\n")
exCurrentTs = currentTs
rf.close()
wf.close()
print 'Output file: "%s"' % out_file
| supertask/ChainX | server/websocket/recorded_operations/combiner.py | Python | mit | 1,425 |
import datetime
#See also:
#def fixdate(d):
#return u"%s-%s-%sT%s:%s:%s"%(d[0:4], d[4:6], d[6:8], d[8:10], d[10:12], d[12:14])
# return u"%s-%s-%s"%(d[0:4], d[4:6], d[6:8])
def webfeed(body):
import feedparser
#Abstracted from Akara demo/modules/atomtools.py
feed = feedparser.parse(body)
from akara import logger; logger.info('%i entries: '%len(feed.entries))
def process_entry(e):
#from pprint import pformat; from akara import logger; logger.info('webfeed entry: ' + repr(pformat(dict(e)))); logger.info('webfeed entry: ' + repr(pformat(e.__dict__)))
data = {}
if hasattr(e, 'link'):
data[u'id'] = e.link
data[u'link'] = e.link
if hasattr(e, 'summary'):
data[u'description'] = e.summary
if hasattr(e, 'title'):
data[u'title'] = e.title
data[u'label'] = e.title
if hasattr(e, 'author_detail'):
data[u'author_name'] = e.author_detail.name
if hasattr(e, 'updated_parsed'):
data[u'updated'] = datetime.datetime(*e.updated_parsed[:7]).isoformat().split('.')[0]
if hasattr(e, 'tags'):
data[u'tags'] = [ t['term'] for t in e.tags ]
return data
return [ process_entry(e) for e in feed.entries ] if feed.entries else None
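# Example usage (a sketch; any string containing an Atom/RSS document works):
#   entries = webfeed(urllib2.urlopen('http://example.org/feed.atom').read())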
| dpla/zen | lib/feeds.py | Python | apache-2.0 | 1,327 |
from setuptools import setup, find_packages
setup(
name="Coinbox-mod-currency",
version="0.2",
packages=find_packages(),
zip_safe=True,
namespace_packages=['cbmod'],
include_package_data=True,
install_requires=[
'sqlalchemy>=0.7, <1.0',
'PyDispatcher>=2.0.3, <3.0',
'ProxyTypes>=0.9, <1.0',
'Babel>=1.3, <2.0',
'PySide>=1.0,<2.0'
],
author='Coinbox POS Team',
author_email='[email protected]',
description='Coinbox POS currency module',
license='MIT',
url='http://coinboxpos.org/'
)
| coinbox/coinbox-mod-currency | setup.py | Python | mit | 654 |
import jwt
from glassfrog.models import Installation
from flask import escape
import Levenshtein
def createMessageDict(color, message, message_format="html"):
message_dict = {
"color": color,
"message": str(message),
"notify": False,
"message_format": message_format
}
return message_dict
def getInstallationFromOauthId(oauthId):
installation = Installation.query.filter_by(oauthId=oauthId).first()
return installation
def getInstallationFromJWT(signed_request):
jwt_unverified = jwt.decode(signed_request,
options={'verify_signature': False, 'verify_exp': False})
oauthId = jwt_unverified['iss']
installation = getInstallationFromOauthId(oauthId)
secret = installation.oauthSecret
jwt.decode(signed_request, secret, algorithms=['HS256'])
return installation
def getLevenshteinDistance(item, keyword):
item = item.lower().replace(' ', '').replace('-', '').replace('_', '')
keyword = keyword.lower().replace(' ', '').replace('-', '').replace('_', '')
return Levenshtein.ratio(item, keyword)
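# e.g. (illustrative): getLevenshteinDistance('General Circle', 'general-circle') == 1.0,
# since both sides normalise to 'generalcircle' before Levenshtein.ratio is applied.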
def getMatchingCircle(circles, keyword):
closestDistance = 0
closestMatch = -999 # no match
for circle in circles:
for name in ['name', 'short_name']:
distance = getLevenshteinDistance(circle[name], keyword)
if distance > 0.5 and distance > closestDistance:
closestDistance = distance
closestMatch = circle['id']
return closestMatch
def getMatchingRole(roles, keyword):
closestDistance = 0
closestMatch = -999 # no match
for role in roles:
if not role['links']['supporting_circle']:
for name in ['name']:
distance = getLevenshteinDistance(role[name], keyword)
if distance > 0.5 and distance > closestDistance:
closestDistance = distance
closestMatch = role['id']
return closestMatch
def makeMentionName(name):
mentionName = name.lower().replace(' ', '-')
return mentionName
| wardweistra/glassfrog-hipchat-bot | glassfrog/functions/messageFunctions.py | Python | lgpl-3.0 | 2,138 |
#!/usr/bin/python
########################################################################
# 15 May 2014
# Patrick Lombard, Centre for Stem Stem Research
# Core Bioinformatics Group
# University of Cambridge
# All right reserved.
########################################################################
import subprocess
import sys, os, re
import pybedtools
import pysam
import argparse
import operator
import pkg_resources
import pychiptools
import ConfigParser
def combine_sam_files(list_of_sams, outname):
count = 0
outsam = outname + "/" + outname + ".sam"
print "==> Combining sam files...\n"
for sam in list_of_sams:
original_file = pysam.Samfile(sam)
if count == 0:
new_file = pysam.Samfile(outsam, mode='wh', template=original_file)
for read in original_file:
new_file.write(read)
else:
for read in original_file:
new_file.write(read)
count += 1
def convert_sam_bed(sam, name, paired, outdir):
obed = "{}/{}_tmp.BED".format(outdir, name)
outbed = open(obed, "wb")
samfile = pysam.Samfile(sam, "r")
data = {}
count = 0
print "==> Converting sam to bed...\n"
for read in samfile.fetch():
count += 1
strand = '+'
if read.is_reverse :
strand = '-'
if strand == '+':
new_start = read.pos
new_end = int(read.pos) + 200
elif strand == '-':
new_end = read.aend
new_start = int(read.aend) - 200
if new_start <= 0 :
new_start = 1
new_end = 200
outbed.write("{}\t{}\t{}\t{}\t0\t{}\n".format(samfile.getrname(read.tid), new_start, new_end, read.qname, strand)),
outbed.close()
command = "sort -k1,1 -k2,2g -o {0}/{1}.BED {0}/{1}_tmp.BED".format(outdir, name)
subprocess.call(command.split())
subprocess.call(["rm", "{}/{}_tmp.BED".format(outdir, name)])
if paired:
count /= 2
return count
def change_for_ucsc(name, chromsizes, outdir, ens=False):
if ens:
outbed2 = open('{}/{}_tmp1.BED'.format(outdir, name), "w")
print "==> Converting Ensembl to UCSC chromosomes...\n"
with open("{}/{}.BED".format(outdir, name)) as f:
for line in f:
line = line.rstrip()
word = line.split("\t")
if re.match(r"^\d", word[0]):
new_chr = "chr" + word[0]
elif re.match(r"^X", word[0]):
new_chr = "chrX"
elif re.match(r"^Y", word[0]):
new_chr = "chrY"
elif word[0] == "MT":
new_chr = "chrM"
else:
pass
outbed2.write("{}\t{}\t{}\t{}\t{}\t{}\n".format(new_chr, word[1], word[2], word[3], word[4], word[5])),
outbed2.close()
command = "bedClip {0}/{1}_tmp1.BED {2} {0}/{1}_ucsc.BED".format(outdir, name, chromsizes)
subprocess.call(command.split())
command = "rm {}/{}_tmp1.BED".format(outdir, name)
subprocess.call(command.split())
else:
command = "bedClip {0}/{1}.BED {2} {0}/{1}_ucsc.BED".format(outdir, name, chromsizes)
subprocess.call(command.split())
os.remove("{}/{}.BED".format(outdir, name))
def genomeCoverage(name, genome, outdir, scale=None):
if scale:
outg2 = "{}/{}_rpm.bedGraph".format(outdir, name)
else:
outg2 = "{}/{}_ucsc.bedGraph".format(outdir, name)
# inbed = pybedtools.BedTool("{}/{}_ucsc.BED".format(outdir, name))
# print "==> Creating bedGraph...\n"
# if scale:
# outcov = inbed.genome_coverage(bg=True, genome=genome, scale=scale)
# else:
# outcov = inbed.genome_coverage(bg=True, genome=genome)
if scale:
command = "genomeCoverageBed -bg -scale {} -i {}/{}_ucsc.BED -g {} > {}".format(scale, outdir, name, genome, outg2)
subprocess.call(command, shell=True)
else:
command = "genomeCoverageBed -bg -i {}/{}_ucsc.BED -g {} > {}".format(outdir, name, genome, outg2)
subprocess.call(command, shell=True)
def bedgraphtobigwig(name, chrom, outdir, house=False, rpm=False):
print "==> Converting bedGraph to bigWig...\n"
if rpm:
command = "bedGraphToBigWig {0}/{1}_rpm.bedGraph {2} {0}/{1}_rpm.bw".format(outdir, name, chrom)
else:
command = "bedGraphToBigWig {0}/{1}_ucsc.bedGraph {2} {0}/{1}.bw".format(outdir, name, chrom)
subprocess.call(command.split())
def ConfigSectionMap(Config, section):
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.get(section, option)
if dict1[option] == -1:
DebugPrint("skip: %s" % option)
except:
print("exception on %s!" % option)
dict1[option] = None
return dict1
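# Example config consumed via ConfigSectionMap in main() below (a sketch; the
# keys under [Conditions] are the sam files to process):
#
#   [Conditions]
#   sample1.sam = condition_A
#   sample2.sam = condition_B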
def main():
parser = argparse.ArgumentParser(description='Processes ChIP-seq samples to bigWig tracks. Use either input or config file\n')
parser.add_argument('-c', '--config', help='Contains [Conditions] with bam files as keys.', required=False)
parser.add_argument('-i','--input', help='Input sam file', required=False)
parser.add_argument('-p', action='store_true', help='Use if samples are paired end. Required if using RPM normalisation', required=False)
parser.add_argument('-g','--genome', help='Genome the samples are aligned to, options include mm10/mm9/hg19', required=True)
parser.add_argument('-e', action='store_true', help='Are samples aligned to Ensembl genome?', required=False)
parser.add_argument('-rpm', action='store_true', help='Scale resulting bigwig to RPM', required=False)
parser.add_argument('-o', '--outdir', help='Output directory', required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = vars(parser.parse_args())
chrom = pkg_resources.resource_filename('pychiptools', 'data/{}.chrom.sizes'.format(args["genome"]))
path0 = os.getcwd()
if not os.path.isdir(args["outdir"]):
os.mkdir(args["outdir"])
if args["input"]:
name = os.path.basename(args["input"])
name = re.sub(".sam$", "", name)
count = convert_sam_bed(args["input"], name, args["p"], args["outdir"])
scale = float(1000000)/int(count)
change_for_ucsc(name, chrom, args["outdir"], args["e"])
if args["rpm"]:
genomeCoverage(name, chrom, args["outdir"], scale)
bedgraphtobigwig(name, chrom, args["outdir"], rpm=True)
else:
genomeCoverage(name, chrom, args["outdir"])
bedgraphtobigwig(name, chrom, args["outdir"])
elif args["config"]:
Config = ConfigParser.ConfigParser()
Config.optionxform = str
Config.read(args["config"])
conditions = ConfigSectionMap(Config, "Conditions")
for key in conditions:
			name = os.path.basename(key)
name = re.sub(".sam$", "", name)
count = convert_sam_bed(key, name, args["p"], args["outdir"])
scale = float(1000000)/int(count)
change_for_ucsc(name, chrom, args["outdir"], args["e"])
if args["rpm"]:
				genomeCoverage(name, chrom, args["outdir"], scale)
bedgraphtobigwig(name, chrom, args["outdir"], rpm=True)
else:
				genomeCoverage(name, chrom, args["outdir"])
bedgraphtobigwig(name, chrom, args["outdir"])
| pdl30/pychiptools | pychiptools/samtoucsc.py | Python | gpl-2.0 | 6,648 |
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Refer to the README and COPYING files for full details of the license.
#
import logging
import logging.config
import os
import signal
import sys
import getopt
import ConfigParser
import cStringIO
from GuestAgentWin32 import WinVdsAgent
io = None
try:
import io as modio
io = modio
except ImportError:
import bytesio as modio
io = modio
from GuestAgentLinux2 import LinuxVdsAgent
#AGENT_CONFIG = '/etc/ovirt_guest_agent.conf'
#AGENT_DEFAULT_CONFIG = '/usr/share/ovirt_guest_agent/default.conf'
#AGENT_DEFAULT_LOG_CONFIG = '/usr/share/ovirt_guest_agent/default-logger.conf'
#AGENT_PIDFILE = '/run/ovirt_guest_agent.pid'
AGENT_CONFIG = 'C:\\ovirt-guest-agent\\ovirt-guest-agent\\ovirt-guest-agent\\ovirt-guest-agent.ini'
AGENT_DEFAULT_CONFIG = 'C:\\ovirt-guest-agent\\ovirt-guest-agent\\ovirt-guest-agent\\default.ini'
AGENT_DEFAULT_LOG_CONFIG = 'C:\\ovirt-guest-agent\\ovirt-guest-agent\\ovirt-guest-agent\\default-logger.ini'
AGENT_PIDFILE = 'C:\\ovirt-guest-agent\\ovirt-guest-agent\\ovirt-guest-agent\\ovirt-guest-agent.pid'
class OVirtAgentDaemon:
def __init__(self):
#cparser = ConfigParser.ConfigParser()
#cparser.read(AGENT_DEFAULT_LOG_CONFIG)
#cparser.read(AGENT_CONFIG)
#strio = cStringIO.StringIO()
#cparser.write(strio)
#bio = io.BytesIO(strio.getvalue())
try:
logging.config.fileConfig(AGENT_CONFIG)
except Exception, e:
print e
#bio.close()
#strio.close()
def run(self, daemon, pidfile):
logging.info("Starting oVirt guest agent")
config = ConfigParser.ConfigParser()
config.read(AGENT_DEFAULT_CONFIG)
config.read(AGENT_DEFAULT_LOG_CONFIG)
config.read(AGENT_CONFIG)
self.vdsAgent = WinVdsAgent(config)
self.vdsAgent.run()
logging.info("oVirt guest agent is down.")
def _daemonize(self):
if os.getppid() == 1:
raise RuntimeError("already a daemon")
pid = os.fork()
if pid == 0:
os.umask(0)
os.setsid()
os.chdir("/")
self._reopen_file_as_null(sys.stdin)
self._reopen_file_as_null(sys.stdout)
self._reopen_file_as_null(sys.stderr)
else:
os._exit(0)
def _reopen_file_as_null(self, oldfile):
nullfile = file("/dev/null", "rw")
os.dup2(nullfile.fileno(), oldfile.fileno())
nullfile.close()
def register_signal_handler(self):
def sigterm_handler(signum, frame):
logging.debug("Handling signal %d" % (signum))
if signum == signal.SIGTERM:
logging.info("Stopping oVirt guest agent")
self.agent.stop()
signal.signal(signal.SIGTERM, sigterm_handler)
def usage():
print "Usage: %s [OPTION]..." % (sys.argv[0])
print ""
print " -p, --pidfile\t\tset pid file name (default: %s)" % AGENT_PIDFILE
print " -d\t\t\trun program as a daemon."
print " -h, --help\t\tdisplay this help and exit."
print ""
if __name__ == '__main__':
try:
try:
opts, args = getopt.getopt(sys.argv[1:],
"?hp:d", ["help", "pidfile="])
pidfile = AGENT_PIDFILE
daemon = False
for opt, value in opts:
if opt in ("-h", "-?", "--help"):
usage()
os._exit(2)
elif opt in ("-p", "--pidfile"):
pidfile = value
elif opt in ("-d"):
daemon = True
agent = OVirtAgentDaemon()
agent.run(daemon, pidfile)
except getopt.GetoptError, err:
print str(err)
print "Try `%s --help' for more information." % (sys.argv[0])
os._exit(2)
except:
logging.exception("Unhandled exception in oVirt guest agent!")
sys.exit(1)
finally:
try:
os.unlink(AGENT_PIDFILE)
except:
pass
| silenceli/oga-windows | ovirt-guest-agent/ovirt-guest-agent.py | Python | apache-2.0 | 4,625 |
#!/usr/bin/env python
"""
This is a very small app using the FloatCanvas
It tests the Spline object, including how you can put points together to
create an object with curves and square corners.
"""
import wx
#### import local version:
#import sys
#sys.path.append("../")
#from floatcanvas import NavCanvas
#from floatcanvas import FloatCanvas as FC
from wx.lib.floatcanvas import FloatCanvas as FC
from wx.lib.floatcanvas import NavCanvas
class Spline(FC.Line):
def __init__(self, *args, **kwargs):
FC.Line.__init__(self, *args, **kwargs)
def _Draw(self, dc , WorldToPixel, ScaleWorldToPixel, HTdc=None):
Points = WorldToPixel(self.Points)
dc.SetPen(self.Pen)
dc.DrawSpline(Points)
if HTdc and self.HitAble:
HTdc.SetPen(self.HitPen)
HTdc.DrawSpline(Points)
class DrawFrame(wx.Frame):
"""
A frame used for the FloatCanvas
"""
def __init__(self, *args, **kwargs):
wx.Frame.__init__(self, *args, **kwargs)
## Set up the MenuBar
MenuBar = wx.MenuBar()
file_menu = wx.Menu()
item = file_menu.Append(-1, "&Close","Close this frame")
self.Bind(wx.EVT_MENU, self.OnQuit, item)
MenuBar.Append(file_menu, "&File")
help_menu = wx.Menu()
item = help_menu.Append(-1, "&About",
"More information About this program")
self.Bind(wx.EVT_MENU, self.OnAbout, item)
MenuBar.Append(help_menu, "&Help")
self.SetMenuBar(MenuBar)
self.CreateStatusBar()
# Add the Canvas
self.Canvas = NavCanvas.NavCanvas(self,
BackgroundColor = "White",
).Canvas
self.Canvas.Bind(FC.EVT_MOTION, self.OnMove)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.DrawTest()
self.Show()
self.Canvas.ZoomToBB()
def OnAbout(self, event):
print("OnAbout called")
dlg = wx.MessageDialog(self, "This is a small program to demonstrate\n"
"the use of the FloatCanvas\n",
"About Me", wx.OK | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
def OnMove(self, event):
"""
Updates the status bar with the world coordinates
"""
self.SetStatusText("%.2f, %.2f"%tuple(event.Coords))
def OnQuit(self,event):
self.Close(True)
def OnCloseWindow(self, event):
self.Destroy()
def DrawTest(self,event=None):
wx.GetApp().Yield()
Canvas = self.Canvas
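        # Each square corner of this outline repeats its control point three
        # times; duplicated points pull the spline tight to them, while the
        # single points in between draw as smooth curves.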
Points = [(0, 0),
(200,0),
(200,0),
(200,0),
(200,15),
(185,15),
(119,15),
(104,15),
(104,30),
(104,265),
(104,280),
(119,280),
(185,280),
(200,280),
(200,295),
(200,295),
(200,295),
(0, 295),
(0, 295),
(0, 295),
(0, 280),
(15, 280),
(81, 280),
(96, 280),
(96, 265),
(96, 30),
(96, 15),
(81, 15),
(15, 15),
(0, 15),
(0, 0),
]
Canvas.ClearAll()
MyLine = FC.Spline(Points,
LineWidth = 3,
LineColor = "Blue")
Canvas.AddObject(MyLine)
Canvas.AddPointSet(Points,
Color = "Red",
Diameter = 4,
)
## A regular old spline:
Points = [(-30, 260),
(-10, 130),
(70, 185),
(160,60),
]
Canvas.AddSpline(Points,
LineWidth = 5,
LineColor = "Purple")
class DemoApp(wx.App):
def __init__(self, *args, **kwargs):
wx.App.__init__(self, *args, **kwargs)
def OnInit(self):
frame = DrawFrame(None, title="FloatCanvas Spline Demo", size = (700,700))
self.SetTopWindow(frame)
return True
app = DemoApp(False)  # put in True if you want output to go to its own window.
app.MainLoop()
| dnxbjyj/python-basic | gui/wxpython/wxPython-demo-4.0.1/samples/floatcanvas/TestSpline.py | Python | mit | 4,596 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for parametric_attention."""
import numpy as np
import tensorflow as tf
from multiple_user_representations.models import parametric_attention
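
# Each test builds a model and checks that calling it on a batch of integer
# ids with shape (batch_size, max_sequence_size) yields representations of
# shape (batch_size, num_representations, output_dimension).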
class ParametricAttentionTest(tf.test.TestCase):
def test_parametric_attention_model_with_single_representation(self):
model = parametric_attention.SimpleParametricAttention(
output_dimension=2,
input_embedding_dimension=2,
vocab_size=10,
num_representations=1,
max_sequence_size=20)
input_batch = tf.convert_to_tensor(
np.random.randint(low=0, high=10, size=(10, 20)))
output = model(input_batch)
self.assertIsInstance(model, tf.keras.Model)
self.assertSequenceEqual(output.numpy().shape, [10, 1, 2])
def test_parametric_attention_model_with_multiple_representations(self):
model = parametric_attention.SimpleParametricAttention(
output_dimension=2,
input_embedding_dimension=2,
vocab_size=10,
num_representations=3,
max_sequence_size=20)
input_batch = tf.convert_to_tensor(
np.random.randint(low=0, high=10, size=(10, 20)))
output = model(input_batch)
self.assertIsInstance(model, tf.keras.Model)
self.assertSequenceEqual(output.numpy().shape, [10, 3, 2])
if __name__ == '__main__':
tf.test.main()
| google-research/google-research | multiple_user_representations/models/parametric_attention_test.py | Python | apache-2.0 | 1,915 |
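# SQLAlchemy-style SQLite URL: "sqlite:///" plus a leading "/" for an
# absolute path, hence the four slashes.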
path_to_database = 'sqlite:////home/jcharante/Projects/Baxterite/baxterite/src/server/database.db' | baxter-oop/baxterite | src/server/config.py | Python | mit | 98 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import TYPE_CHECKING, Any, Optional, Sequence
from airflow.models import BaseOperator
from airflow.providers.apache.pig.hooks.pig import PigCliHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class PigOperator(BaseOperator):
"""
Executes pig script.
:param pig: the pig latin script to be executed. (templated)
:param pig_cli_conn_id: reference to the Hive database
:param pigparams_jinja_translate: when True, pig params-type templating
${var} gets translated into jinja-type templating {{ var }}. Note that
you may want to use this along with the
``DAG(user_defined_macros=myargs)`` parameter. View the DAG
object documentation for more details.
:param pig_opts: pig options, such as: -x tez, -useHCatalog, ...
"""
template_fields: Sequence[str] = ('pig',)
template_ext: Sequence[str] = (
'.pig',
'.piglatin',
)
ui_color = '#f0e4ec'
def __init__(
self,
*,
pig: str,
pig_cli_conn_id: str = 'pig_cli_default',
pigparams_jinja_translate: bool = False,
pig_opts: Optional[str] = None,
**kwargs: Any,
) -> None:
super().__init__(**kwargs)
self.pigparams_jinja_translate = pigparams_jinja_translate
self.pig = pig
self.pig_cli_conn_id = pig_cli_conn_id
self.pig_opts = pig_opts
self.hook: Optional[PigCliHook] = None
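
    # With pigparams_jinja_translate enabled, prepare_template() rewrites Pig
    # parameters to Jinja, e.g. "LOAD '$input'" -> "LOAD '{{ input }}'".
    # Note the pattern matches the unbraced $var form.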
def prepare_template(self):
if self.pigparams_jinja_translate:
self.pig = re.sub(r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)
def execute(self, context: 'Context'):
self.log.info('Executing: %s', self.pig)
self.hook = PigCliHook(pig_cli_conn_id=self.pig_cli_conn_id)
self.hook.run_cli(pig=self.pig, pig_opts=self.pig_opts)
    def on_kill(self):
        # execute() may not have created the hook yet when the task is killed.
        if self.hook is not None:
            self.hook.kill()
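
# Illustrative usage sketch (not part of this module), assuming a surrounding
# DAG and a configured 'pig_cli_default' connection:
#
#     run_pig = PigOperator(
#         task_id="count_lines",
#         pig="lines = LOAD '/data/input' AS (line:chararray);",
#         pig_opts="-x local",
#     )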
| Acehaidrey/incubator-airflow | airflow/providers/apache/pig/operators/pig.py | Python | apache-2.0 | 2,694 |