repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
FederatedAI/FATE | python/federatedml/param/encrypted_mode_calculation_param.py | 1 | 1888 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from federatedml.param.base_param import BaseParam
class EncryptedModeCalculatorParam(BaseParam):
"""
Define the encrypted_mode_calculator parameters.
Parameters
----------
mode: str, support 'strict', 'fast', 'balance', 'confusion_opt', 'confusion_opt_balance' only, default: strict
re_encrypted_rate: float or int, numeric number in [0, 1], used when mode is 'balance' or 'confusion_opt_balance', default: 1
"""
def __init__(self, mode="strict", re_encrypted_rate=1):
self.mode = mode
self.re_encrypted_rate = re_encrypted_rate
def check(self):
descr = "encrypted_mode_calculator param"
self.mode = self.check_and_change_lower(self.mode,
["strict", "fast", "balance", "confusion_opt", "confusion_opt_balance"],
descr)
if self.mode in ["balance", "confusion_opt_balance"]:
if type(self.re_encrypted_rate).__name__ not in ["int", "long", "float"]:
raise ValueError("re_encrypted_rate should be a numeric number")
if not 0.0 <= self.re_encrypted_rate <= 1:
raise ValueError("re_encrypted_rate should in [0, 1]")
return True
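# A minimal usage sketch (illustrative addition, not part of the original FATE module):
# construct the param object and validate it with check(); the second call exercises the
# ValueError raised above for an out-of-range re_encrypted_rate.
if __name__ == "__main__":
    param = EncryptedModeCalculatorParam(mode="balance", re_encrypted_rate=0.3)
    param.check()
    try:
        EncryptedModeCalculatorParam(mode="balance", re_encrypted_rate=2).check()
    except ValueError as err:
        print(err)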
| apache-2.0 | -2,847,998,658,587,991,000 | 36.019608 | 120 | 0.630826 | false | 3.983122 | false | false | false |
appleseedhq/cortex | python/IECoreMaya/Collapsible.py | 5 | 8941 |
##########################################################################
#
# Copyright (c) 2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import maya.cmds
import maya.OpenMaya
import IECoreMaya
## In Maya 2011 and 2012, the collapsible frameLayout became rather ugly,
# and stopped indenting the arrow with the label. This made complex uis
# consisting of lots of ClassVectorParameters and ClassParameters somewhat
# unreadable. So we introduce this class to get back some control. Aside
# from spelling collapsible properly and being prettier, this class also
# has the advantage of supporting annotations which are displayed on the label.
# As with the maya frameLayout, the preExpandCommand, expandCommand and
# collapseCommand are only called as a result of user action, and never as
# a result of a call to setCollapsed or getCollapsed. There are separate
# implementations for maya before qt and maya after qt.
class _CollapsibleMotif( IECoreMaya.UIElement ) :
def __init__( self,
label="",
labelVisible=True,
labelIndent=0,
labelFont = "boldLabelFont",
annotation="",
collapsed = True,
preExpandCommand = None,
expandCommand = None,
collapseCommand = None,
) :
kw = {}
if preExpandCommand is not None :
kw["preExpandCommand"] = preExpandCommand
if expandCommand is not None :
kw["expandCommand"] = expandCommand
if collapseCommand is not None :
kw["collapseCommand"] = collapseCommand
# implementation for motif is pretty simple - just a frame layout
IECoreMaya.UIElement.__init__( self,
maya.cmds.frameLayout(
label = label,
labelVisible = labelVisible,
labelIndent = labelIndent,
labelAlign = "center",
font = labelFont,
borderVisible = False,
collapsable = True,
collapse = collapsed,
marginWidth = 0,
**kw
)
)
# can't display it but at least we can store it
self.__annotation = annotation
self.__frameLayout = self._topLevelUI()
## The maya frameLayout whose collapsibility is controlled by this
# class. Add children by editing the contents of this layout.
def frameLayout( self ) :
return self._topLevelUI()
def setLabel( self, label ) :
maya.cmds.frameLayout( self.frameLayout(), edit=True, label = label )
def getLabel( self ) :
return maya.cmds.frameLayout( self.frameLayout(), query=True, label = True )
def setAnnotation( self, annotation ) :
self.__annotation = annotation
def getAnnotation( self ) :
return self.__annotation
def getCollapsed( self ) :
return maya.cmds.frameLayout( self.frameLayout(), query=True, collapse=True )
def setCollapsed( self, collapsed ) :
maya.cmds.frameLayout( self.frameLayout(), edit=True, collapse=collapsed )
class _CollapsibleQt( IECoreMaya.UIElement ) :
def __init__( self,
label="",
labelVisible=True,
labelIndent=0,
labelFont = "boldLabelFont",
annotation="",
collapsed = True,
preExpandCommand = None,
expandCommand = None,
collapseCommand = None,
) :
IECoreMaya.UIElement.__init__( self, maya.cmds.formLayout() )
attachForm = []
attachControl = []
# make the layout to put things in. this is actually a frameLayout, just
# with the ugly header bit we don't like hidden.
########################################################################
self.__frameLayout = maya.cmds.frameLayout(
labelVisible = False,
borderVisible = False,
collapsable = True,
collapse = collapsed,
marginWidth = 0,
)
# passing borderVisible=False to the constructor does bugger all so we have to do it with
# an edit
maya.cmds.frameLayout( self.__frameLayout, edit=True, borderVisible=False, marginWidth=0 )
attachForm.append( ( self.__frameLayout, "left", 0 ) )
attachForm.append( ( self.__frameLayout, "right", 0 ) )
attachForm.append( ( self.__frameLayout, "bottom", 0 ) )
# optional header, with the triangle for expanding and collapsing
########################################################################
self.__collapsibleIcon = None
self.__labelControl = None
if labelVisible :
# have to make one button for the icon and one for the label
# because otherwise the icon size changes when we toggle
# the image, and the text moves.
self.__collapsibleIcon = maya.cmds.iconTextButton(
parent = self._topLevelUI(),
height = 20,
width = 15,
image = "arrowRight.xpm",
command = self.__toggle,
annotation = annotation,
)
self.__labelControl = maya.cmds.iconTextButton(
parent = self._topLevelUI(),
height = 20,
label = label,
# the font flag appears to do nothing, but maybe it will
# miraculously be supported in the future?
font = labelFont,
style = "textOnly",
command = self.__toggle,
annotation = annotation,
)
attachForm.append( ( self.__collapsibleIcon, "left", labelIndent ) )
attachForm.append( ( self.__collapsibleIcon, "top", 0 ) )
attachForm.append( ( self.__labelControl, "top", 0 ) )
attachControl.append( ( self.__labelControl, "left", 0, self.__collapsibleIcon ) )
attachControl.append( ( self.__frameLayout, "top", 0, self.__labelControl ) )
else :
attachForm.append( ( self.__frameLayout, "top", 0 ) )
maya.cmds.formLayout(
self._topLevelUI(),
edit = True,
attachForm = attachForm,
attachControl = attachControl,
)
maya.cmds.setParent( self.__frameLayout )
self.__annotation = annotation
self.__labelText = label
self.__preExpandCommand = preExpandCommand
self.__expandCommand = expandCommand
self.__collapseCommand = collapseCommand
## The maya frameLayout whose collapsibility is controlled by this
# class. Add children by editing the contents of this layout.
def frameLayout( self ) :
return self.__frameLayout
def setLabel( self, label ) :
self.__labelText = label
if self.__labelControl is not None :
maya.cmds.iconTextButton( self.__labelControl, edit=True, label=label )
def getLabel( self ) :
return self.__labelText
def setAnnotation( self, annotation ) :
self.__annotation = annotation
if self.__labelControl is not None :
maya.cmds.iconTextButton( self.__labelControl, edit=True, annotation=annotation )
maya.cmds.iconTextButton( self.__collapsibleIcon, edit=True, annotation=annotation )
def getAnnotation( self ) :
return self.__annotation
def getCollapsed( self ) :
return maya.cmds.frameLayout( self.__frameLayout, query=True, collapse=True )
def setCollapsed( self, collapsed ) :
maya.cmds.frameLayout( self.__frameLayout, edit=True, collapse=collapsed )
if self.__collapsibleIcon is not None :
maya.cmds.iconTextButton(
self.__collapsibleIcon,
edit = True,
image = "arrowRight.xpm" if collapsed else "arrowDown.xpm",
)
def __toggle( self ) :
collapsed = not self.getCollapsed()
if not collapsed and self.__preExpandCommand is not None :
self.__preExpandCommand()
self.setCollapsed( not self.getCollapsed() )
if collapsed :
if self.__collapseCommand is not None :
self.__collapseCommand()
else :
if self.__expandCommand is not None :
self.__expandCommand()
# choose the right implementation based on the current maya version
if maya.OpenMaya.MGlobal.apiVersion() >= 201100 :
Collapsible = _CollapsibleQt
else :
Collapsible = _CollapsibleMotif
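## An illustrative usage sketch (not part of the original module). It assumes a running
# Maya session with a window or layout already set as the current parent; the control
# names and labels below are made up for the example.
def _collapsibleExample() :
    collapsible = Collapsible(
        label = "Attributes",
        annotation = "Click the header to expand or collapse",
        collapsed = True,
        expandCommand = lambda : maya.cmds.warning( "expanded" ),
    )
    # children are added under the frameLayout controlled by the Collapsible
    maya.cmds.setParent( collapsible.frameLayout() )
    maya.cmds.button( label = "A child control" )
    maya.cmds.setParent( ".." )
    return collapsible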
| bsd-3-clause | -3,500,785,268,951,882,000 | 29.831034 | 92 | 0.690527 | false | 3.728524 | false | false | false |
lefnire/tensorforce | tensorforce/models/q_naf_model.py | 1 | 8828 |
# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange
import tensorflow as tf
from tensorforce import util, TensorForceError
from tensorforce.models import QModel
from tensorforce.core.networks import Linear
class QNAFModel(QModel):
def __init__(
self,
states,
actions,
scope,
device,
saver,
summarizer,
execution,
batching_capacity,
variable_noise,
states_preprocessing,
actions_exploration,
reward_preprocessing,
update_mode,
memory,
optimizer,
discount,
network,
distributions,
entropy_regularization,
target_sync_frequency,
target_update_weight,
double_q_model,
huber_loss
):
if any(action['type'] != 'float' or 'min_value' in action or 'max_value' in action for action in actions.values()):
raise TensorForceError("Only unconstrained float actions valid for NAFModel.")
super(QNAFModel, self).__init__(
states=states,
actions=actions,
scope=scope,
device=device,
saver=saver,
summarizer=summarizer,
execution=execution,
batching_capacity=batching_capacity,
variable_noise=variable_noise,
states_preprocessing=states_preprocessing,
actions_exploration=actions_exploration,
reward_preprocessing=reward_preprocessing,
update_mode=update_mode,
memory=memory,
optimizer=optimizer,
discount=discount,
network=network,
distributions=distributions,
entropy_regularization=entropy_regularization,
target_sync_frequency=target_sync_frequency,
target_update_weight=target_update_weight,
double_q_model=double_q_model,
huber_loss=huber_loss
)
def initialize(self, custom_getter):
super(QNAFModel, self).initialize(custom_getter)
self.state_values = dict()
self.l_entries = dict()
for name, action in self.actions_spec.items():
num_action = util.prod(action['shape'])
self.state_values[name] = Linear(size=num_action, scope='state-value')
self.l_entries[name] = Linear(size=(num_action * (num_action - 1) // 2), scope='l-entries')
def tf_q_value(self, embedding, distr_params, action, name):
num_action = util.prod(self.actions_spec[name]['shape'])
mean, stddev, _ = distr_params
flat_mean = tf.reshape(tensor=mean, shape=(-1, num_action))
flat_stddev = tf.reshape(tensor=stddev, shape=(-1, num_action))
# Advantage computation
# Network outputs entries of lower triangular matrix L
if self.l_entries[name] is None:
l_matrix = flat_stddev
l_matrix = tf.exp(l_matrix)
else:
l_matrix = tf.map_fn(fn=tf.diag, elems=flat_stddev)
l_entries = self.l_entries[name].apply(x=embedding)
l_entries = tf.exp(l_entries)
offset = 0
columns = list()
for zeros, size in enumerate(xrange(num_action - 1, -1, -1), 1):
column = tf.pad(tensor=l_entries[:, offset: offset + size], paddings=((0, 0), (zeros, 0)))
columns.append(column)
offset += size
l_matrix += tf.stack(values=columns, axis=1)
# P = LL^T
p_matrix = tf.matmul(a=l_matrix, b=tf.transpose(a=l_matrix, perm=(0, 2, 1)))
# A = -0.5 (a - mean)^T P (a - mean)
flat_action = tf.reshape(tensor=action, shape=(-1, num_action))
difference = flat_action - flat_mean
advantage = tf.matmul(a=p_matrix, b=tf.expand_dims(input=difference, axis=2))
advantage = tf.matmul(a=tf.expand_dims(input=difference, axis=1), b=advantage)
advantage = tf.squeeze(input=(-advantage / 2.0), axis=2)
# Q = A + V
# State-value function
state_value = self.state_values[name].apply(x=embedding)
q_value = state_value + advantage
return tf.reshape(tensor=q_value, shape=((-1,) + self.actions_spec[name]['shape']))
def tf_loss_per_instance(self, states, internals, actions, terminal, reward, next_states, next_internals, update, reference=None):
# Michael: doubling this function because NAF needs V'(s) not Q'(s), see comment below
embedding = self.network.apply(x=states, internals=internals, update=update)
# Both networks can use the same internals, could that be a problem?
# Otherwise need to handle internals indices correctly everywhere
target_embedding = self.target_network.apply(
x=next_states,
internals=next_internals,
update=update
)
deltas = list()
for name, distribution in self.distributions.items():
target_distribution = self.target_distributions[name]
distr_params = distribution.parameterize(x=embedding)
target_distr_params = target_distribution.parameterize(x=target_embedding)
q_value = self.tf_q_value(embedding=embedding, distr_params=distr_params, action=actions[name], name=name)
# Notice, this is V', not Q' because NAF outputs V(s) separately
next_state_value = target_distribution.state_value(distr_params=target_distr_params)
delta = self.tf_q_delta(q_value=q_value, next_q_value=next_state_value, terminal=terminal, reward=reward)
collapsed_size = util.prod(util.shape(delta)[1:])
delta = tf.reshape(tensor=delta, shape=(-1, collapsed_size))
deltas.append(delta)
# Surrogate loss as the mean squared error between actual observed rewards and expected rewards
loss_per_instance = tf.reduce_mean(input_tensor=tf.concat(values=deltas, axis=1), axis=1)
if self.huber_loss is not None and self.huber_loss > 0.0:
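# Huber loss: quadratic for |delta| <= huber_loss, linear beyond it, which caps the
# gradient magnitude contributed by large TD errors.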
return tf.where(
condition=(tf.abs(x=loss_per_instance) <= self.huber_loss),
x=(0.5 * tf.square(x=loss_per_instance)),
y=(self.huber_loss * (tf.abs(x=loss_per_instance) - 0.5 * self.huber_loss))
)
else:
return tf.square(x=loss_per_instance)
def tf_regularization_losses(self, states, internals, update):
losses = super(QNAFModel, self).tf_regularization_losses(
states=states,
internals=internals,
update=update
)
for state_value in self.state_values.values():
regularization_loss = state_value.regularization_loss()
if regularization_loss is not None:
if 'state-values' in losses:
losses['state-values'] += regularization_loss
else:
losses['state-values'] = regularization_loss
for l_entries in self.l_entries.values():
regularization_loss = l_entries.regularization_loss()
if regularization_loss is not None:
if 'l-entries' in losses:
losses['l-entries'] += regularization_loss
else:
losses['l-entries'] = regularization_loss
return losses
def get_variables(self, include_submodules=False, include_nontrainable=False):
model_variables = super(QNAFModel, self).get_variables(
include_submodules=include_submodules,
include_nontrainable=include_nontrainable
)
state_values_variables = [
variable for name in sorted(self.state_values)
for variable in self.state_values[name].get_variables()
]
model_variables += state_values_variables
l_entries_variables = [
variable for name in sorted(self.l_entries)
for variable in self.l_entries[name].get_variables()
]
model_variables += l_entries_variables
return model_variables
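# An illustrative NumPy restatement of the NAF decomposition that tf_q_value builds with
# TensorFlow ops above (names, entry ordering and shapes here are assumptions for the
# sketch, not part of TensorForce): Q(s, a) = V(s) + A(s, a), where
# A(s, a) = -0.5 * (a - mean)^T P (a - mean) and P = L L^T, with L lower triangular and a
# positive (exponentiated) diagonal, so the advantage is always <= 0 and peaks at a = mean.
import numpy as np

def naf_q_value_sketch(mean, log_diag, l_entries, state_value, action):
    n = mean.shape[0]
    l_matrix = np.diag(np.exp(log_diag))       # positive diagonal of L
    rows, cols = np.tril_indices(n, k=-1)      # n * (n - 1) / 2 strictly-lower slots
    l_matrix[rows, cols] = np.exp(l_entries)   # off-diagonal entries, exponentiated as above
    p_matrix = l_matrix.dot(l_matrix.T)        # P = L L^T, positive semi-definite
    diff = action - mean
    advantage = -0.5 * diff.dot(p_matrix).dot(diff)
    return state_value + advantage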
| apache-2.0 | 6,982,727,479,257,415,000 | 38.410714 | 134 | 0.609538 | false | 4.073835 | false | false | false |
victorianorton/SimpleRPGGame | src/game/Creature.py | 1 | 2202 |
from src.common.Observable import*
from src.game.AttackSpell import *
from src.game.AttackWeapon import *
from src.game.Attributes import *
from src.game.visit import *
from src.game.Heroes import *
#from src.game.AttackInventory import*
from random import*
class Creatures(Observable, GameAttributes, Visitor):
def __init__(self):
super(Creatures, self).__init__()
self._attackSpell = AttackSpell()
self._attackWeapon = AttackWeapon()
self._name = ''
self.gameAttributes = GameAttributes()
self.health = 1
self.healthMax = 1
def doDamage(self, monster):
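# Attack roll of 0-2 is reduced by the monster's avoidance roll (0 to its current
# health) and clamped to [0, monster.health] before being applied.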
self.damage = min(
max(randint(0, 2) - randint(0, monster.health), 0),
monster.health)
monster.health -= self.damage
if self.damage == 0:
print ("%s avoids heros's attack." % monster)
else:
print ("hero injures %s!" % monster)
return monster.health <= 0
def setAttStr(self, strength, con, dex, intt):
self.Strength = strength
self.Constitution = con
self.Dexterity = dex
self.Intelligence = intt
def setAttackSpell(self, attackSpell):
self.attackSpell = attackSpell
def setAttackWeapon(self, attackWeapon):
self.attackWeapon = attackWeapon
def AttackSpell(self):
self.attackSpell()
def AttackWeapon(self):
self.attackWeapon.attackWeapon()
def planMove(self):
self.ready = True
def roam(self):
print ("%s is roaming around the castle" % self._name)
self.notifyObservers()
def act(self):
if self.ready:
self.roam()
self.ready = False
def north(self):
print ("%s is moving in the direction north" % self._name)
self.roam()
self.notifyObservers()
def south(self):
print ("%s is moving in the direction south" % self._name)
self.roam()
def east(self):
print ("%s is moving in the direction east" % self._name)
self.roam()
def west(self):
print ("%s is moving in the direction west" % self._name)
self.roam()
def Display(self):
pass
| mit | 8,173,256,232,380,542,000 | 26.197531 | 66 | 0.601726 | false | 3.663894 | false | false | false |
eric-stanley/NewsBlur | apps/social/models.py | 1 | 137635 |
import datetime
import time
import zlib
import hashlib
import redis
import re
import mongoengine as mongo
import random
import requests
import HTMLParser
from collections import defaultdict
from pprint import pprint
from BeautifulSoup import BeautifulSoup
from mongoengine.queryset import Q
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.template.defaultfilters import slugify
from django.core.mail import EmailMultiAlternatives
from apps.reader.models import UserSubscription, RUserStory
from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
from apps.rss_feeds.models import Feed, MStory
from apps.rss_feeds.text_importer import TextImporter
from apps.profile.models import Profile, MSentEmail
from vendor import facebook
from vendor import tweepy
from vendor import appdotnet
from vendor import pynliner
from utils import log as logging
from utils import json_functions as json
from utils.feed_functions import relative_timesince, chunks
from utils.story_functions import truncate_chars, strip_tags, linkify, image_size
from utils.scrubber import SelectiveScriptScrubber
from utils import s3_utils
from StringIO import StringIO
RECOMMENDATIONS_LIMIT = 5
IGNORE_IMAGE_SOURCES = [
"http://feeds.feedburner.com"
]
class MRequestInvite(mongo.Document):
email = mongo.EmailField()
request_date = mongo.DateTimeField(default=datetime.datetime.now)
invite_sent = mongo.BooleanField(default=False)
invite_sent_date = mongo.DateTimeField()
meta = {
'collection': 'social_invites',
'allow_inheritance': False,
}
def __unicode__(self):
return "%s%s" % (self.email, '*' if self.invite_sent else '')
@classmethod
def blast(cls):
invites = cls.objects.filter(email_sent=None)
print ' ---> Found %s invites...' % invites.count()
for invite in invites:
try:
invite.send_email()
except:
print ' ***> Could not send invite to: %s. Deleting.' % invite.username
invite.delete()
def send_email(self):
user = User.objects.filter(username__iexact=self.username)
if not user:
user = User.objects.filter(email__iexact=self.username)
if user:
user = user[0]
email = user.email or self.username
else:
user = {
'username': self.username,
'profile': {
'autologin_url': '/',
}
}
email = self.username
params = {
'user': user,
}
text = render_to_string('mail/email_social_beta.txt', params)
html = render_to_string('mail/email_social_beta.xhtml', params)
subject = "Psst, you're in..."
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
to=['<%s>' % (email)])
msg.attach_alternative(html, "text/html")
msg.send()
self.email_sent = True
self.save()
logging.debug(" ---> ~BB~FM~SBSending email for social beta: %s" % self.username)
class MSocialProfile(mongo.Document):
user_id = mongo.IntField(unique=True)
username = mongo.StringField(max_length=30, unique=True)
email = mongo.StringField()
bio = mongo.StringField(max_length=160)
blurblog_title = mongo.StringField(max_length=256)
custom_bgcolor = mongo.StringField(max_length=50)
custom_css = mongo.StringField()
photo_url = mongo.StringField()
photo_service = mongo.StringField()
location = mongo.StringField(max_length=40)
website = mongo.StringField(max_length=200)
bb_permalink_direct = mongo.BooleanField()
subscription_count = mongo.IntField(default=0)
shared_stories_count = mongo.IntField(default=0)
following_count = mongo.IntField(default=0)
follower_count = mongo.IntField(default=0)
following_user_ids = mongo.ListField(mongo.IntField())
follower_user_ids = mongo.ListField(mongo.IntField())
unfollowed_user_ids = mongo.ListField(mongo.IntField())
requested_follow_user_ids = mongo.ListField(mongo.IntField())
popular_publishers = mongo.StringField()
stories_last_month = mongo.IntField(default=0)
average_stories_per_month = mongo.IntField(default=0)
story_count_history = mongo.ListField()
feed_classifier_counts = mongo.DictField()
favicon_color = mongo.StringField(max_length=6)
protected = mongo.BooleanField()
private = mongo.BooleanField()
meta = {
'collection': 'social_profile',
'indexes': ['user_id', 'following_user_ids', 'follower_user_ids', 'unfollowed_user_ids', 'requested_follow_user_ids'],
'allow_inheritance': False,
'index_drop_dups': True,
}
def __unicode__(self):
return "%s [%s] following %s/%s, shared %s" % (self.username, self.user_id,
self.following_count, self.follower_count, self.shared_stories_count)
@classmethod
def get_user(cls, user_id):
profile, created = cls.objects.get_or_create(user_id=user_id)
if created:
profile.save()
return profile
def save(self, *args, **kwargs):
if not self.username:
self.import_user_fields()
if not self.subscription_count:
self.count_follows(skip_save=True)
if self.bio and len(self.bio) > MSocialProfile.bio.max_length:
self.bio = self.bio[:80]
if self.bio:
self.bio = strip_tags(self.bio)
if self.website:
self.website = strip_tags(self.website)
if self.location:
self.location = strip_tags(self.location)
if self.custom_css:
self.custom_css = strip_tags(self.custom_css)
super(MSocialProfile, self).save(*args, **kwargs)
if self.user_id not in self.following_user_ids:
self.follow_user(self.user_id, force=True)
self.count_follows()
return self
@property
def blurblog_url(self):
return "http://%s.%s" % (
self.username_slug,
Site.objects.get_current().domain.replace('www.', ''))
@property
def blurblog_rss(self):
return "%s%s" % (self.blurblog_url, reverse('shared-stories-rss-feed',
kwargs={'user_id': self.user_id,
'username': self.username_slug}))
def find_stories(self, query, offset=0, limit=25):
stories_db = MSharedStory.objects(
Q(user_id=self.user_id) &
(Q(story_title__icontains=query) |
Q(story_author_name__icontains=query) |
Q(story_tags__icontains=query))
).order_by('-shared_date')[offset:offset+limit]
stories = Feed.format_stories(stories_db)
return stories
def recommended_users(self):
r = redis.Redis(connection_pool=settings.REDIS_POOL)
following_key = "F:%s:F" % (self.user_id)
social_follow_key = "FF:%s:F" % (self.user_id)
profile_user_ids = []
# Find potential twitter/fb friends
services = MSocialServices.objects.get(user_id=self.user_id)
facebook_user_ids = [u.user_id for u in
MSocialServices.objects.filter(facebook_uid__in=services.facebook_friend_ids).only('user_id')]
twitter_user_ids = [u.user_id for u in
MSocialServices.objects.filter(twitter_uid__in=services.twitter_friend_ids).only('user_id')]
social_user_ids = facebook_user_ids + twitter_user_ids
# Find users not currently followed by this user
r.delete(social_follow_key)
nonfriend_user_ids = []
if social_user_ids:
r.sadd(social_follow_key, *social_user_ids)
nonfriend_user_ids = r.sdiff(social_follow_key, following_key)
profile_user_ids = [int(f) for f in nonfriend_user_ids]
r.delete(social_follow_key)
# Not enough? Grab popular users.
if len(nonfriend_user_ids) < RECOMMENDATIONS_LIMIT:
homepage_user = User.objects.get(username='popular')
suggested_users_list = r.sdiff("F:%s:F" % homepage_user.pk, following_key)
suggested_users_list = [int(f) for f in suggested_users_list]
suggested_user_ids = []
slots_left = min(len(suggested_users_list), RECOMMENDATIONS_LIMIT - len(nonfriend_user_ids))
for slot in range(slots_left):
suggested_user_ids.append(random.choice(suggested_users_list))
profile_user_ids.extend(suggested_user_ids)
# Sort by shared story count
profiles = MSocialProfile.profiles(profile_user_ids).order_by('-shared_stories_count')[:RECOMMENDATIONS_LIMIT]
return profiles
@property
def username_slug(self):
return slugify(self.username)
def count_stories(self):
# Popular Publishers
self.save_popular_publishers()
def save_popular_publishers(self, feed_publishers=None):
if not feed_publishers:
publishers = defaultdict(int)
for story in MSharedStory.objects(user_id=self.user_id).only('story_feed_id')[:500]:
publishers[story.story_feed_id] += 1
feed_titles = dict((f.id, f.feed_title)
for f in Feed.objects.filter(pk__in=publishers.keys()).only('id', 'feed_title'))
feed_publishers = sorted([{'id': k, 'feed_title': feed_titles[k], 'story_count': v}
for k, v in publishers.items()
if k in feed_titles],
key=lambda f: f['story_count'],
reverse=True)[:20]
popular_publishers = json.encode(feed_publishers)
if len(popular_publishers) < 1023:
self.popular_publishers = popular_publishers
self.save()
return
if len(popular_publishers) > 1:
self.save_popular_publishers(feed_publishers=feed_publishers[:-1])
@classmethod
def profile(cls, user_id, include_follows=True):
profile = cls.get_user(user_id)
return profile.canonical(include_follows=True)
@classmethod
def profiles(cls, user_ids):
profiles = cls.objects.filter(user_id__in=user_ids)
return profiles
@classmethod
def profile_feeds(cls, user_ids):
profiles = cls.objects.filter(user_id__in=user_ids)
profiles = dict((p.user_id, p.feed()) for p in profiles)
return profiles
@classmethod
def sync_all_redis(cls):
for profile in cls.objects.all():
profile.sync_redis(force=True)
def sync_redis(self, force=False):
self.following_user_ids = list(set(self.following_user_ids))
self.save()
for user_id in self.following_user_ids:
self.follow_user(user_id, force=force)
self.follow_user(self.user_id)
@property
def title(self):
return self.blurblog_title if self.blurblog_title else self.username + "'s blurblog"
def feed(self):
params = self.canonical(compact=True)
params.update({
'feed_title': self.title,
'page_url': reverse('load-social-page', kwargs={'user_id': self.user_id, 'username': self.username_slug}),
'shared_stories_count': self.shared_stories_count,
})
return params
def page(self):
params = self.canonical(include_follows=True)
params.update({
'feed_title': self.title,
'custom_css': self.custom_css,
})
return params
@property
def profile_photo_url(self):
if self.photo_url:
return self.photo_url
return settings.MEDIA_URL + 'img/reader/default_profile_photo.png'
@property
def large_photo_url(self):
photo_url = self.email_photo_url
if 'graph.facebook.com' in photo_url:
return photo_url + '?type=large'
elif 'twimg' in photo_url:
return photo_url.replace('_normal', '')
elif '/avatars/' in photo_url:
return photo_url.replace('thumbnail_', 'large_')
return photo_url
@property
def email_photo_url(self):
if self.photo_url:
if self.photo_url.startswith('//'):
self.photo_url = 'http:' + self.photo_url
return self.photo_url
domain = Site.objects.get_current().domain
return 'http://' + domain + settings.MEDIA_URL + 'img/reader/default_profile_photo.png'
def canonical(self, compact=False, include_follows=False, common_follows_with_user=None,
include_settings=False, include_following_user=None):
domain = Site.objects.get_current().domain
params = {
'id': 'social:%s' % self.user_id,
'user_id': self.user_id,
'username': self.username,
'photo_url': self.email_photo_url,
'large_photo_url': self.large_photo_url,
'location': self.location,
'num_subscribers': self.follower_count,
'feed_title': self.title,
'feed_address': "http://%s%s" % (domain, reverse('shared-stories-rss-feed',
kwargs={'user_id': self.user_id, 'username': self.username_slug})),
'feed_link': self.blurblog_url,
'protected': self.protected,
'private': self.private,
}
if not compact:
params.update({
'large_photo_url': self.large_photo_url,
'bio': self.bio,
'website': self.website,
'shared_stories_count': self.shared_stories_count,
'following_count': self.following_count,
'follower_count': self.follower_count,
'popular_publishers': json.decode(self.popular_publishers),
'stories_last_month': self.stories_last_month,
'average_stories_per_month': self.average_stories_per_month,
})
if include_settings:
params.update({
'custom_css': self.custom_css,
'custom_bgcolor': self.custom_bgcolor,
'bb_permalink_direct': self.bb_permalink_direct,
})
if include_follows:
params.update({
'photo_service': self.photo_service,
'following_user_ids': self.following_user_ids_without_self[:48],
'follower_user_ids': self.follower_user_ids_without_self[:48],
})
if common_follows_with_user:
FOLLOWERS_LIMIT = 128
with_user = MSocialProfile.get_user(common_follows_with_user)
followers_youknow, followers_everybody = with_user.common_follows(self.user_id, direction='followers')
following_youknow, following_everybody = with_user.common_follows(self.user_id, direction='following')
params['followers_youknow'] = followers_youknow[:FOLLOWERS_LIMIT]
params['followers_everybody'] = followers_everybody[:FOLLOWERS_LIMIT]
params['following_youknow'] = following_youknow[:FOLLOWERS_LIMIT]
params['following_everybody'] = following_everybody[:FOLLOWERS_LIMIT]
params['requested_follow'] = common_follows_with_user in self.requested_follow_user_ids
if include_following_user or common_follows_with_user:
if not include_following_user:
include_following_user = common_follows_with_user
if include_following_user != self.user_id:
params['followed_by_you'] = bool(self.is_followed_by_user(include_following_user))
params['following_you'] = self.is_following_user(include_following_user)
return params
@property
def following_user_ids_without_self(self):
if self.user_id in self.following_user_ids:
return [u for u in self.following_user_ids if u != self.user_id]
return self.following_user_ids
@property
def follower_user_ids_without_self(self):
if self.user_id in self.follower_user_ids:
return [u for u in self.follower_user_ids if u != self.user_id]
return self.follower_user_ids
def import_user_fields(self, skip_save=False):
user = User.objects.get(pk=self.user_id)
self.username = user.username
self.email = user.email
def count_follows(self, skip_save=False):
self.subscription_count = UserSubscription.objects.filter(user__pk=self.user_id).count()
self.shared_stories_count = MSharedStory.objects.filter(user_id=self.user_id).count()
self.following_count = len(self.following_user_ids_without_self)
self.follower_count = len(self.follower_user_ids_without_self)
if not skip_save:
self.save()
def follow_user(self, user_id, check_unfollowed=False, force=False):
r = redis.Redis(connection_pool=settings.REDIS_POOL)
if check_unfollowed and user_id in self.unfollowed_user_ids:
return
if self.user_id == user_id:
followee = self
else:
followee = MSocialProfile.get_user(user_id)
logging.debug(" ---> ~FB~SB%s~SN (%s) following %s" % (self.username, self.user_id, user_id))
if not followee.protected or force:
if user_id not in self.following_user_ids:
self.following_user_ids.append(user_id)
elif not force:
return
if user_id in self.unfollowed_user_ids:
self.unfollowed_user_ids.remove(user_id)
self.count_follows()
self.save()
if followee.protected and user_id != self.user_id and not force:
if self.user_id not in followee.requested_follow_user_ids:
followee.requested_follow_user_ids.append(self.user_id)
MFollowRequest.add(self.user_id, user_id)
elif self.user_id not in followee.follower_user_ids:
followee.follower_user_ids.append(self.user_id)
followee.count_follows()
followee.save()
if followee.protected and user_id != self.user_id and not force:
from apps.social.tasks import EmailFollowRequest
EmailFollowRequest.apply_async(kwargs=dict(follower_user_id=self.user_id,
followee_user_id=user_id),
countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS)
return
following_key = "F:%s:F" % (self.user_id)
r.sadd(following_key, user_id)
follower_key = "F:%s:f" % (user_id)
r.sadd(follower_key, self.user_id)
if user_id != self.user_id:
MInteraction.new_follow(follower_user_id=self.user_id, followee_user_id=user_id)
MActivity.new_follow(follower_user_id=self.user_id, followee_user_id=user_id)
socialsub, _ = MSocialSubscription.objects.get_or_create(user_id=self.user_id,
subscription_user_id=user_id)
socialsub.needs_unread_recalc = True
socialsub.save()
MFollowRequest.remove(self.user_id, user_id)
if not force:
from apps.social.tasks import EmailNewFollower
EmailNewFollower.apply_async(kwargs=dict(follower_user_id=self.user_id,
followee_user_id=user_id),
countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS)
return socialsub
def is_following_user(self, user_id):
# XXX TODO: Outsource to redis
return user_id in self.following_user_ids
def is_followed_by_user(self, user_id):
# XXX TODO: Outsource to redis
return user_id in self.follower_user_ids
def unfollow_user(self, user_id):
r = redis.Redis(connection_pool=settings.REDIS_POOL)
if not isinstance(user_id, int):
user_id = int(user_id)
if user_id == self.user_id:
# Only unfollow other people, not yourself.
return
if user_id in self.following_user_ids:
self.following_user_ids.remove(user_id)
if user_id not in self.unfollowed_user_ids:
self.unfollowed_user_ids.append(user_id)
self.count_follows()
self.save()
followee = MSocialProfile.get_user(user_id)
if self.user_id in followee.follower_user_ids:
followee.follower_user_ids.remove(self.user_id)
followee.count_follows()
followee.save()
if self.user_id in followee.requested_follow_user_ids:
followee.requested_follow_user_ids.remove(self.user_id)
followee.count_follows()
followee.save()
MFollowRequest.remove(self.user_id, user_id)
following_key = "F:%s:F" % (self.user_id)
r.srem(following_key, user_id)
follower_key = "F:%s:f" % (user_id)
r.srem(follower_key, self.user_id)
try:
MSocialSubscription.objects.get(user_id=self.user_id, subscription_user_id=user_id).delete()
except MSocialSubscription.DoesNotExist:
return False
def common_follows(self, user_id, direction='followers'):
r = redis.Redis(connection_pool=settings.REDIS_POOL)
my_followers = "F:%s:%s" % (self.user_id, 'F' if direction == 'followers' else 'F')
their_followers = "F:%s:%s" % (user_id, 'f' if direction == 'followers' else 'F')
follows_inter = r.sinter(their_followers, my_followers)
follows_diff = r.sdiff(their_followers, my_followers)
follows_inter = [int(f) for f in follows_inter]
follows_diff = [int(f) for f in follows_diff]
if user_id in follows_inter:
follows_inter.remove(user_id)
if user_id in follows_diff:
follows_diff.remove(user_id)
return follows_inter, follows_diff
def send_email_for_new_follower(self, follower_user_id):
user = User.objects.get(pk=self.user_id)
if follower_user_id not in self.follower_user_ids:
logging.user(user, "~FMNo longer being followed by %s" % follower_user_id)
return
if not user.email:
logging.user(user, "~FMNo email to send to, skipping.")
return
elif not user.profile.send_emails:
logging.user(user, "~FMDisabled emails, skipping.")
return
if self.user_id == follower_user_id:
return
emails_sent = MSentEmail.objects.filter(receiver_user_id=user.pk,
sending_user_id=follower_user_id,
email_type='new_follower')
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
for email in emails_sent:
if email.date_sent > day_ago:
logging.user(user, "~SK~FMNot sending new follower email, already sent before. NBD.")
return
follower_profile = MSocialProfile.get_user(follower_user_id)
common_followers, _ = self.common_follows(follower_user_id, direction='followers')
common_followings, _ = self.common_follows(follower_user_id, direction='following')
if self.user_id in common_followers:
common_followers.remove(self.user_id)
if self.user_id in common_followings:
common_followings.remove(self.user_id)
common_followers = MSocialProfile.profiles(common_followers)
common_followings = MSocialProfile.profiles(common_followings)
data = {
'user': user,
'follower_profile': follower_profile,
'common_followers': common_followers,
'common_followings': common_followings,
}
text = render_to_string('mail/email_new_follower.txt', data)
html = render_to_string('mail/email_new_follower.xhtml', data)
subject = "%s is now following your Blurblog on NewsBlur!" % follower_profile.username
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
to=['%s <%s>' % (user.username, user.email)])
msg.attach_alternative(html, "text/html")
msg.send()
MSentEmail.record(receiver_user_id=user.pk, sending_user_id=follower_user_id,
email_type='new_follower')
logging.user(user, "~BB~FM~SBSending email for new follower: %s" % follower_profile.username)
def send_email_for_follow_request(self, follower_user_id):
user = User.objects.get(pk=self.user_id)
if follower_user_id not in self.requested_follow_user_ids:
logging.user(user, "~FMNo longer being followed by %s" % follower_user_id)
return
if not user.email:
logging.user(user, "~FMNo email to send to, skipping.")
return
elif not user.profile.send_emails:
logging.user(user, "~FMDisabled emails, skipping.")
return
if self.user_id == follower_user_id:
return
emails_sent = MSentEmail.objects.filter(receiver_user_id=user.pk,
sending_user_id=follower_user_id,
email_type='follow_request')
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
for email in emails_sent:
if email.date_sent > day_ago:
logging.user(user, "~SK~FMNot sending follow request email, already sent before. NBD.")
return
follower_profile = MSocialProfile.get_user(follower_user_id)
common_followers, _ = self.common_follows(follower_user_id, direction='followers')
common_followings, _ = self.common_follows(follower_user_id, direction='following')
if self.user_id in common_followers:
common_followers.remove(self.user_id)
if self.user_id in common_followings:
common_followings.remove(self.user_id)
common_followers = MSocialProfile.profiles(common_followers)
common_followings = MSocialProfile.profiles(common_followings)
data = {
'user': user,
'follower_profile': follower_profile,
'common_followers': common_followers,
'common_followings': common_followings,
}
text = render_to_string('mail/email_follow_request.txt', data)
html = render_to_string('mail/email_follow_request.xhtml', data)
subject = "%s has requested to follow your Blurblog on NewsBlur" % follower_profile.username
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
to=['%s <%s>' % (user.username, user.email)])
msg.attach_alternative(html, "text/html")
msg.send()
MSentEmail.record(receiver_user_id=user.pk, sending_user_id=follower_user_id,
email_type='follow_request')
logging.user(user, "~BB~FM~SBSending email for follow request: %s" % follower_profile.username)
def save_feed_story_history_statistics(self):
"""
Fills in missing months between earlier occurrences and now.
Save format: [('YYYY-MM', #), ...]
Example output: [('2010-12', 123), ('2011-01', 146)]
"""
now = datetime.datetime.utcnow()
min_year = now.year
total = 0
month_count = 0
# Count stories, aggregate by year and month. Map Reduce!
map_f = """
function() {
var date = (this.shared_date.getFullYear()) + "-" + (this.shared_date.getMonth()+1);
emit(date, 1);
}
"""
reduce_f = """
function(key, values) {
var total = 0;
for (var i=0; i < values.length; i++) {
total += values[i];
}
return total;
}
"""
dates = {}
res = MSharedStory.objects(user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline')
for r in res:
dates[r.key] = r.value
year = int(re.findall(r"(\d{4})-\d{1,2}", r.key)[0])
if year < min_year:
min_year = year
# Assemble a list with 0's filled in for missing months,
# trimming left and right 0's.
months = []
start = False
for year in range(min_year, now.year+1):
for month in range(1, 12+1):
if datetime.datetime(year, month, 1) < now:
key = u'%s-%s' % (year, month)
if dates.get(key) or start:
start = True
months.append((key, dates.get(key, 0)))
total += dates.get(key, 0)
month_count += 1
self.story_count_history = months
self.average_stories_per_month = total / max(1, month_count)
self.save()
def save_classifier_counts(self):
def calculate_scores(cls, facet):
map_f = """
function() {
emit(this["%s"], {
pos: this.score>0 ? this.score : 0,
neg: this.score<0 ? Math.abs(this.score) : 0
});
}
""" % (facet)
reduce_f = """
function(key, values) {
var result = {pos: 0, neg: 0};
values.forEach(function(value) {
result.pos += value.pos;
result.neg += value.neg;
});
return result;
}
"""
scores = []
res = cls.objects(social_user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline')
for r in res:
facet_values = dict([(k, int(v)) for k,v in r.value.iteritems()])
facet_values[facet] = r.key
scores.append(facet_values)
scores = sorted(scores, key=lambda v: v['neg'] - v['pos'])
return scores
scores = {}
for cls, facet in [(MClassifierTitle, 'title'),
(MClassifierAuthor, 'author'),
(MClassifierTag, 'tag'),
(MClassifierFeed, 'feed_id')]:
scores[facet] = calculate_scores(cls, facet)
if facet == 'feed_id' and scores[facet]:
scores['feed'] = scores[facet]
del scores['feed_id']
elif not scores[facet]:
del scores[facet]
if scores:
self.feed_classifier_counts = scores
self.save()
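# An illustrative, standalone restatement of the month-filling logic used by
# MSocialProfile.save_feed_story_history_statistics above (a hypothetical helper, not part
# of NewsBlur): given a {"YYYY-M": count} dict of shared-story counts, return a list of
# ("YYYY-M", count) tuples with zeros for empty months, trimmed of leading empty months.
def _fill_month_history(dates, now=None):
    now = now or datetime.datetime.utcnow()
    min_year = min([int(key.split('-')[0]) for key in dates] + [now.year])
    months = []
    started = False
    for year in range(min_year, now.year + 1):
        for month in range(1, 12 + 1):
            if datetime.datetime(year, month, 1) >= now:
                continue
            key = u'%s-%s' % (year, month)
            if dates.get(key) or started:
                started = True
                months.append((key, dates.get(key, 0)))
    return months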
class MSocialSubscription(mongo.Document):
UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
user_id = mongo.IntField()
subscription_user_id = mongo.IntField(unique_with='user_id')
follow_date = mongo.DateTimeField(default=datetime.datetime.utcnow())
last_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF)
mark_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF)
unread_count_neutral = mongo.IntField(default=0)
unread_count_positive = mongo.IntField(default=0)
unread_count_negative = mongo.IntField(default=0)
unread_count_updated = mongo.DateTimeField()
oldest_unread_story_date = mongo.DateTimeField()
needs_unread_recalc = mongo.BooleanField(default=False)
feed_opens = mongo.IntField(default=0)
is_trained = mongo.BooleanField(default=False)
meta = {
'collection': 'social_subscription',
'indexes': [('user_id', 'subscription_user_id')],
'allow_inheritance': False,
}
def __unicode__(self):
user = User.objects.get(pk=self.user_id)
subscription_user = User.objects.get(pk=self.subscription_user_id)
return "Socialsub %s:%s" % (user, subscription_user)
@classmethod
def feeds(cls, user_id=None, subscription_user_id=None, calculate_all_scores=False,
update_counts=False, *args, **kwargs):
params = {
'user_id': user_id,
}
if subscription_user_id:
params["subscription_user_id"] = subscription_user_id
social_subs = cls.objects.filter(**params)
social_feeds = []
if social_subs:
if calculate_all_scores:
for s in social_subs: s.calculate_feed_scores()
# Fetch user profiles of subscriptions
social_user_ids = [sub.subscription_user_id for sub in social_subs]
social_profiles = MSocialProfile.profile_feeds(social_user_ids)
for social_sub in social_subs:
user_id = social_sub.subscription_user_id
if social_profiles[user_id]['shared_stories_count'] <= 0:
continue
if update_counts and social_sub.needs_unread_recalc:
social_sub.calculate_feed_scores()
# Combine subscription read counts with feed/user info
feed = dict(social_sub.canonical().items() + social_profiles[user_id].items())
social_feeds.append(feed)
return social_feeds
@classmethod
def feeds_with_updated_counts(cls, user, social_feed_ids=None):
feeds = {}
# Get social subscriptions for user
user_subs = cls.objects.filter(user_id=user.pk)
if social_feed_ids:
social_user_ids = [int(f.replace('social:', '')) for f in social_feed_ids]
user_subs = user_subs.filter(subscription_user_id__in=social_user_ids)
profiles = MSocialProfile.objects.filter(user_id__in=social_user_ids)
profiles = dict((p.user_id, p) for p in profiles)
for i, sub in enumerate(user_subs):
# Count unreads if subscription is stale.
if (sub.needs_unread_recalc or
(sub.unread_count_updated and
sub.unread_count_updated < user.profile.unread_cutoff) or
(sub.oldest_unread_story_date and
sub.oldest_unread_story_date < user.profile.unread_cutoff)):
sub = sub.calculate_feed_scores(force=True, silent=True)
feed_id = "social:%s" % sub.subscription_user_id
feeds[feed_id] = {
'ps': sub.unread_count_positive,
'nt': sub.unread_count_neutral,
'ng': sub.unread_count_negative,
'id': feed_id,
}
if social_feed_ids and sub.subscription_user_id in profiles:
feeds[feed_id]['shared_stories_count'] = profiles[sub.subscription_user_id].shared_stories_count
return feeds
def canonical(self):
return {
'user_id': self.user_id,
'subscription_user_id': self.subscription_user_id,
'nt': self.unread_count_neutral,
'ps': self.unread_count_positive,
'ng': self.unread_count_negative,
'is_trained': self.is_trained,
'feed_opens': self.feed_opens,
}
@classmethod
def subs_for_users(cls, user_id, subscription_user_ids=None, read_filter="unread"):
socialsubs = cls.objects
if read_filter == "unread":
socialsubs = socialsubs.filter(Q(unread_count_neutral__gt=0) |
Q(unread_count_positive__gt=0))
if not subscription_user_ids:
socialsubs = socialsubs.filter(user_id=user_id)\
.only('subscription_user_id', 'mark_read_date', 'is_trained')
else:
socialsubs = socialsubs.filter(user_id=user_id,
subscription_user_id__in=subscription_user_ids)\
.only('subscription_user_id', 'mark_read_date', 'is_trained')
return socialsubs
@classmethod
def story_hashes(cls, user_id, relative_user_id, subscription_user_ids=None, socialsubs=None,
read_filter="unread", order="newest",
include_timestamps=False, group_by_user=True, cutoff_date=None):
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
pipeline = r.pipeline()
story_hashes = {} if group_by_user else []
if not socialsubs:
socialsubs = cls.subs_for_users(relative_user_id,
subscription_user_ids=subscription_user_ids,
read_filter=read_filter)
subscription_user_ids = [sub.subscription_user_id for sub in socialsubs]
if not subscription_user_ids:
return story_hashes
read_dates = dict((us.subscription_user_id,
int(us.mark_read_date.strftime('%s'))) for us in socialsubs)
current_time = int(time.time() + 60*60*24)
if not cutoff_date:
cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
unread_timestamp = int(time.mktime(cutoff_date.timetuple()))-1000
feed_counter = 0
for sub_user_id_group in chunks(subscription_user_ids, 20):
pipeline = r.pipeline()
for sub_user_id in sub_user_id_group:
stories_key = 'B:%s' % (sub_user_id)
sorted_stories_key = 'zB:%s' % (sub_user_id)
read_stories_key = 'RS:%s' % (user_id)
read_social_stories_key = 'RS:%s:B:%s' % (user_id, sub_user_id)
unread_stories_key = 'UB:%s:%s' % (user_id, sub_user_id)
sorted_stories_key = 'zB:%s' % (sub_user_id)
unread_ranked_stories_key = 'zUB:%s:%s' % (user_id, sub_user_id)
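# Key scheme used below: B:<uid> holds the story hashes shared by the followed user,
# zB:<uid> the same hashes scored by share time, RS:<uid> (and RS:<uid>:B:<sub>) the
# reading user's read hashes, and UB/zUB:<uid>:<sub> are temporary unread working sets.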
expire_unread_stories_key = False
max_score = current_time
if read_filter == 'unread':
# +1 for the intersection b/w zF and F, which carries an implicit score of 1.
min_score = read_dates[sub_user_id] + 1
pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key)
pipeline.sdiffstore(unread_stories_key, unread_stories_key, read_social_stories_key)
expire_unread_stories_key = True
else:
min_score = unread_timestamp
unread_stories_key = stories_key
if order == 'oldest':
byscorefunc = pipeline.zrangebyscore
else:
byscorefunc = pipeline.zrevrangebyscore
min_score, max_score = max_score, min_score
pipeline.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key])
byscorefunc(unread_ranked_stories_key, min_score, max_score, withscores=include_timestamps)
pipeline.delete(unread_ranked_stories_key)
if expire_unread_stories_key:
pipeline.delete(unread_stories_key)
results = pipeline.execute()
for hashes in results:
if not isinstance(hashes, list): continue
if group_by_user:
story_hashes[subscription_user_ids[feed_counter]] = hashes
feed_counter += 1
else:
story_hashes.extend(hashes)
return story_hashes
def get_stories(self, offset=0, limit=6, order='newest', read_filter='all',
withscores=False, hashes_only=False, cutoff_date=None,
mark_read_complement=False):
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
ignore_user_stories = False
stories_key = 'B:%s' % (self.subscription_user_id)
read_stories_key = 'RS:%s' % (self.user_id)
read_social_stories_key = 'RS:%s:B:%s' % (self.user_id, self.subscription_user_id)
unread_stories_key = 'UB:%s:%s' % (self.user_id, self.subscription_user_id)
if not r.exists(stories_key):
return []
elif read_filter != 'unread' or not r.exists(read_stories_key):
ignore_user_stories = True
unread_stories_key = stories_key
else:
r.sdiffstore(unread_stories_key, stories_key, read_stories_key)
r.sdiffstore(unread_stories_key, unread_stories_key, read_social_stories_key)
sorted_stories_key = 'zB:%s' % (self.subscription_user_id)
unread_ranked_stories_key = 'z%sUB:%s:%s' % ('h' if hashes_only else '',
self.user_id, self.subscription_user_id)
r.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key])
now = datetime.datetime.now()
current_time = int(time.time() + 60*60*24)
mark_read_time = int(time.mktime(self.mark_read_date.timetuple())) + 1
if cutoff_date:
mark_read_time = int(time.mktime(cutoff_date.timetuple())) + 1
if order == 'oldest':
byscorefunc = r.zrangebyscore
min_score = mark_read_time
max_score = current_time
else: # newest
byscorefunc = r.zrevrangebyscore
min_score = current_time
if mark_read_complement:
min_score = mark_read_time
now = datetime.datetime.now()
unread_cutoff = cutoff_date
if not unread_cutoff:
unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
max_score = int(time.mktime(unread_cutoff.timetuple()))-1
story_ids = byscorefunc(unread_ranked_stories_key, min_score,
max_score, start=offset, num=limit,
withscores=withscores)
if withscores:
story_ids = [(s[0], int(s[1])) for s in story_ids]
r.expire(unread_ranked_stories_key, 1*60*60)
if not ignore_user_stories:
r.delete(unread_stories_key)
return story_ids
@classmethod
def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6,
order='newest', read_filter='all', relative_user_id=None, cache=True,
socialsubs=None, cutoff_date=None):
rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL)
if not relative_user_id:
relative_user_id = user_id
if order == 'oldest':
range_func = rt.zrange
else:
range_func = rt.zrevrange
if not isinstance(social_user_ids, list):
social_user_ids = [social_user_ids]
ranked_stories_keys = 'zU:%s:social' % (user_id)
unread_ranked_stories_keys = 'zhU:%s:social' % (user_id)
if (offset and cache and
rt.exists(ranked_stories_keys) and
rt.exists(unread_ranked_stories_keys)):
story_hashes_and_dates = range_func(ranked_stories_keys, offset, limit, withscores=True)
if not story_hashes_and_dates:
return [], [], []
story_hashes, story_dates = zip(*story_hashes_and_dates)
if read_filter == "unread":
unread_story_hashes = story_hashes
else:
unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset+limit)
return story_hashes, story_dates, unread_story_hashes
else:
rt.delete(ranked_stories_keys)
rt.delete(unread_ranked_stories_keys)
story_hashes = cls.story_hashes(user_id, relative_user_id,
subscription_user_ids=social_user_ids,
read_filter=read_filter, order=order,
include_timestamps=True,
group_by_user=False,
socialsubs=socialsubs,
cutoff_date=cutoff_date)
if not story_hashes:
return [], [], []
pipeline = rt.pipeline()
for story_hash_group in chunks(story_hashes, 100):
pipeline.zadd(ranked_stories_keys, **dict(story_hash_group))
pipeline.execute()
story_hashes_and_dates = range_func(ranked_stories_keys, offset, limit, withscores=True)
if not story_hashes_and_dates:
return [], [], []
story_hashes, story_dates = zip(*story_hashes_and_dates)
if read_filter == "unread":
unread_feed_story_hashes = story_hashes
rt.zunionstore(unread_ranked_stories_keys, [ranked_stories_keys])
else:
unread_story_hashes = cls.story_hashes(user_id, relative_user_id,
subscription_user_ids=social_user_ids,
read_filter="unread", order=order,
include_timestamps=True,
group_by_user=False,
socialsubs=socialsubs,
cutoff_date=cutoff_date)
if unread_story_hashes:
pipeline = rt.pipeline()
for unread_story_hash_group in chunks(unread_story_hashes, 100):
pipeline.zadd(unread_ranked_stories_keys, **dict(unread_story_hash_group))
pipeline.execute()
unread_feed_story_hashes = range_func(unread_ranked_stories_keys, offset, limit)
rt.expire(ranked_stories_keys, 60*60)
rt.expire(unread_ranked_stories_keys, 60*60)
return story_hashes, story_dates, unread_feed_story_hashes
def mark_story_ids_as_read(self, story_hashes, feed_id=None, mark_all_read=False, request=None):
data = dict(code=0, payload=story_hashes)
r = redis.Redis(connection_pool=settings.REDIS_POOL)
if not request:
request = User.objects.get(pk=self.user_id)
if not self.needs_unread_recalc and not mark_all_read:
self.needs_unread_recalc = True
self.save()
sub_username = MSocialProfile.get_user(self.subscription_user_id).username
if len(story_hashes) > 1:
logging.user(request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username))
else:
logging.user(request, "~FYRead story in social subscription: %s" % (sub_username))
for story_hash in set(story_hashes):
if feed_id is not None:
story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=feed_id)
if feed_id is None:
feed_id, _ = MStory.split_story_hash(story_hash)
# Find other social feeds with this story to update their counts
friend_key = "F:%s:F" % (self.user_id)
share_key = "S:%s" % (story_hash)
friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]
RUserStory.mark_read(self.user_id, feed_id, story_hash, social_user_ids=friends_with_shares,
aggregated=mark_all_read)
if self.user_id in friends_with_shares:
friends_with_shares.remove(self.user_id)
if friends_with_shares:
socialsubs = MSocialSubscription.objects.filter(
user_id=self.user_id,
subscription_user_id__in=friends_with_shares)
for socialsub in socialsubs:
if not socialsub.needs_unread_recalc and not mark_all_read:
socialsub.needs_unread_recalc = True
socialsub.save()
# Also count on original subscription
usersubs = UserSubscription.objects.filter(user=self.user_id, feed=feed_id)
if usersubs:
usersub = usersubs[0]
if not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save()
return data
@classmethod
def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_id=None,
request=None):
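        # Like mark_story_ids_as_read, but for shared stories read without a social
        # subscription to the sharer (e.g. from the global shared-stories river).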
data = dict(code=0, payload=story_ids)
r = redis.Redis(connection_pool=settings.REDIS_POOL)
if not request:
request = User.objects.get(pk=user_id)
if len(story_ids) > 1:
logging.user(request, "~FYRead %s social stories from global" % (len(story_ids)))
else:
logging.user(request, "~FYRead social story from global")
for story_id in set(story_ids):
try:
story = MSharedStory.objects.get(user_id=social_user_id,
story_guid=story_id)
except MSharedStory.DoesNotExist:
continue
# Find other social feeds with this story to update their counts
friend_key = "F:%s:F" % (user_id)
share_key = "S:%s" % (story.story_hash)
friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]
RUserStory.mark_read(user_id, story.story_feed_id, story.story_hash,
social_user_ids=friends_with_shares)
# Also count on original subscription
usersubs = UserSubscription.objects.filter(user=user_id, feed=story.story_feed_id)
if usersubs:
usersub = usersubs[0]
if not usersub.needs_unread_recalc:
usersub.needs_unread_recalc = True
usersub.save()
# XXX TODO: Real-time notification, just for this user
return data
def mark_feed_read(self, cutoff_date=None):
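        # Marks everything in this social subscription as read up to cutoff_date
        # (default: just past the latest shared story within the unread window).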
user_profile = Profile.objects.get(user_id=self.user_id)
recount = True
if cutoff_date:
cutoff_date = cutoff_date + datetime.timedelta(seconds=1)
else:
# Use the latest story to get last read time.
latest_shared_story = MSharedStory.objects(user_id=self.subscription_user_id,
shared_date__gte=user_profile.unread_cutoff
).order_by('-shared_date').only('shared_date').first()
if latest_shared_story:
cutoff_date = latest_shared_story['shared_date'] + datetime.timedelta(seconds=1)
else:
cutoff_date = datetime.datetime.utcnow()
recount = False
self.last_read_date = cutoff_date
self.mark_read_date = cutoff_date
self.oldest_unread_story_date = cutoff_date
if not recount:
self.unread_count_negative = 0
self.unread_count_positive = 0
self.unread_count_neutral = 0
self.unread_count_updated = datetime.datetime.utcnow()
self.needs_unread_recalc = False
else:
self.needs_unread_recalc = True
# Manually mark all shared stories as read.
unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
mark_read_complement=True)
self.mark_story_ids_as_read(unread_story_hashes, mark_all_read=True)
self.save()
def calculate_feed_scores(self, force=False, silent=False):
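        # Recomputes negative/neutral/positive unread counts for this social subscription
        # by running the user's classifiers (feed, author, title, tag) over unread shared stories.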
if not self.needs_unread_recalc and not force:
return self
now = datetime.datetime.now()
user_profile = Profile.objects.get(user_id=self.user_id)
if user_profile.last_seen_on < user_profile.unread_cutoff:
# if not silent:
# logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed))
return self
feed_scores = dict(negative=0, neutral=0, positive=0)
        # Respect the unread cutoff window: if this subscription was marked read more
        # recently than the cutoff, use mark_read_date; otherwise advance mark_read_date
        # up to the cutoff so older stories stay read.
date_delta = user_profile.unread_cutoff
if date_delta < self.mark_read_date:
date_delta = self.mark_read_date
else:
self.mark_read_date = date_delta
unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
cutoff_date=user_profile.unread_cutoff)
stories_db = MSharedStory.objects(user_id=self.subscription_user_id,
story_hash__in=unread_story_hashes)
story_feed_ids = set()
for s in stories_db:
story_feed_ids.add(s['story_feed_id'])
story_feed_ids = list(story_feed_ids)
usersubs = UserSubscription.objects.filter(user__pk=self.user_id, feed__pk__in=story_feed_ids)
usersubs_map = dict((sub.feed_id, sub) for sub in usersubs)
oldest_unread_story_date = now
unread_stories_db = []
for story in stories_db:
if story['story_hash'] not in unread_story_hashes:
continue
feed_id = story.story_feed_id
if usersubs_map.get(feed_id) and story.shared_date < usersubs_map[feed_id].mark_read_date:
continue
unread_stories_db.append(story)
if story.shared_date < oldest_unread_story_date:
oldest_unread_story_date = story.shared_date
stories = Feed.format_stories(unread_stories_db)
classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
        # Merge with feed-specific classifiers
if story_feed_ids:
classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=self.user_id,
feed_id__in=story_feed_ids))
classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=self.user_id,
feed_id__in=story_feed_ids))
classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=self.user_id,
feed_id__in=story_feed_ids))
classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=self.user_id,
feed_id__in=story_feed_ids))
for story in stories:
scores = {
'feed' : apply_classifier_feeds(classifier_feeds, story['story_feed_id'],
social_user_ids=self.subscription_user_id),
'author' : apply_classifier_authors(classifier_authors, story),
'tags' : apply_classifier_tags(classifier_tags, story),
'title' : apply_classifier_titles(classifier_titles, story),
}
max_score = max(scores['author'], scores['tags'], scores['title'])
min_score = min(scores['author'], scores['tags'], scores['title'])
if max_score > 0:
feed_scores['positive'] += 1
elif min_score < 0:
feed_scores['negative'] += 1
else:
if scores['feed'] > 0:
feed_scores['positive'] += 1
elif scores['feed'] < 0:
feed_scores['negative'] += 1
else:
feed_scores['neutral'] += 1
self.unread_count_positive = feed_scores['positive']
self.unread_count_neutral = feed_scores['neutral']
self.unread_count_negative = feed_scores['negative']
self.unread_count_updated = datetime.datetime.now()
self.oldest_unread_story_date = oldest_unread_story_date
self.needs_unread_recalc = False
self.save()
if (self.unread_count_positive == 0 and
self.unread_count_neutral == 0):
self.mark_feed_read()
if not silent:
logging.info(' ---> [%s] Computing social scores: %s (%s/%s/%s)' % (user_profile, self.subscription_user_id, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive']))
return self
@classmethod
def mark_dirty_sharing_story(cls, user_id, story_feed_id, story_guid_hash):
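        # A followed user's share of this story changed, so flag the viewer's social
        # subscriptions to those sharers for an unread recount.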
r = redis.Redis(connection_pool=settings.REDIS_POOL)
friends_key = "F:%s:F" % (user_id)
share_key = "S:%s:%s" % (story_feed_id, story_guid_hash)
following_user_ids = r.sinter(friends_key, share_key)
following_user_ids = [int(f) for f in following_user_ids]
if not following_user_ids:
return None
social_subs = cls.objects.filter(user_id=user_id, subscription_user_id__in=following_user_ids)
for social_sub in social_subs:
social_sub.needs_unread_recalc = True
social_sub.save()
return social_subs
class MCommentReply(mongo.EmbeddedDocument):
reply_id = mongo.ObjectIdField()
user_id = mongo.IntField()
publish_date = mongo.DateTimeField()
comments = mongo.StringField()
email_sent = mongo.BooleanField(default=False)
liking_users = mongo.ListField(mongo.IntField())
def canonical(self):
reply = {
'reply_id': self.reply_id,
'user_id': self.user_id,
'publish_date': relative_timesince(self.publish_date),
'date': self.publish_date,
'comments': self.comments,
}
return reply
meta = {
'ordering': ['publish_date'],
'id_field': 'reply_id',
'allow_inheritance': False,
}
class MSharedStory(mongo.Document):
user_id = mongo.IntField()
shared_date = mongo.DateTimeField()
comments = mongo.StringField()
has_comments = mongo.BooleanField(default=False)
has_replies = mongo.BooleanField(default=False)
replies = mongo.ListField(mongo.EmbeddedDocumentField(MCommentReply))
source_user_id = mongo.IntField()
story_hash = mongo.StringField()
story_feed_id = mongo.IntField()
story_date = mongo.DateTimeField()
story_title = mongo.StringField(max_length=1024)
story_content = mongo.StringField()
story_content_z = mongo.BinaryField()
story_original_content = mongo.StringField()
story_original_content_z = mongo.BinaryField()
original_text_z = mongo.BinaryField()
story_content_type = mongo.StringField(max_length=255)
story_author_name = mongo.StringField()
story_permalink = mongo.StringField()
story_guid = mongo.StringField(unique_with=('user_id',))
story_guid_hash = mongo.StringField(max_length=6)
image_urls = mongo.ListField(mongo.StringField(max_length=1024))
story_tags = mongo.ListField(mongo.StringField(max_length=250))
posted_to_services = mongo.ListField(mongo.StringField(max_length=20))
mute_email_users = mongo.ListField(mongo.IntField())
liking_users = mongo.ListField(mongo.IntField())
emailed_reshare = mongo.BooleanField(default=False)
emailed_replies = mongo.ListField(mongo.ObjectIdField())
image_count = mongo.IntField()
image_sizes = mongo.ListField(mongo.DictField())
meta = {
'collection': 'shared_stories',
'indexes': [('user_id', '-shared_date'), ('user_id', 'story_feed_id'),
'shared_date', 'story_guid', 'story_feed_id', 'story_hash'],
'index_drop_dups': True,
'ordering': ['-shared_date'],
'allow_inheritance': False,
}
def __unicode__(self):
user = User.objects.get(pk=self.user_id)
return "%s: %s (%s)%s%s" % (user.username,
self.decoded_story_title[:20],
self.story_feed_id,
': ' if self.has_comments else '',
self.comments[:20])
@property
def guid_hash(self):
return hashlib.sha1(self.story_guid).hexdigest()[:6]
@property
def feed_guid_hash(self):
return "%s:%s" % (self.story_feed_id or "0", self.guid_hash)
@property
def decoded_story_title(self):
h = HTMLParser.HTMLParser()
return h.unescape(self.story_title)
def canonical(self):
return {
"user_id": self.user_id,
"shared_date": self.shared_date,
"story_title": self.story_title,
"story_content": self.story_content_z and zlib.decompress(self.story_content_z),
"comments": self.comments,
}
def save(self, *args, **kwargs):
scrubber = SelectiveScriptScrubber()
if self.story_content:
self.story_content = scrubber.scrub(self.story_content)
self.story_content_z = zlib.compress(self.story_content)
self.story_content = None
if self.story_original_content:
self.story_original_content_z = zlib.compress(self.story_original_content)
self.story_original_content = None
self.story_guid_hash = hashlib.sha1(self.story_guid).hexdigest()[:6]
self.story_title = strip_tags(self.story_title)
self.story_hash = self.feed_guid_hash
self.comments = linkify(strip_tags(self.comments))
for reply in self.replies:
reply.comments = linkify(strip_tags(reply.comments))
self.shared_date = self.shared_date or datetime.datetime.utcnow()
self.has_replies = bool(len(self.replies))
super(MSharedStory, self).save(*args, **kwargs)
author = MSocialProfile.get_user(self.user_id)
author.count_follows()
self.sync_redis()
MActivity.new_shared_story(user_id=self.user_id, source_user_id=self.source_user_id,
story_title=self.story_title,
comments=self.comments, story_feed_id=self.story_feed_id,
story_id=self.story_guid, share_date=self.shared_date)
return self
def delete(self, *args, **kwargs):
MActivity.remove_shared_story(user_id=self.user_id, story_feed_id=self.story_feed_id,
story_id=self.story_guid)
self.remove_from_redis()
super(MSharedStory, self).delete(*args, **kwargs)
def unshare_story(self):
socialsubs = MSocialSubscription.objects.filter(subscription_user_id=self.user_id,
needs_unread_recalc=False)
for socialsub in socialsubs:
socialsub.needs_unread_recalc = True
socialsub.save()
self.delete()
@classmethod
def feed_quota(cls, user_id, feed_id, days=1, quota=1):
day_ago = datetime.datetime.now()-datetime.timedelta(days=days)
shared_count = cls.objects.filter(shared_date__gte=day_ago, story_feed_id=feed_id).count()
return shared_count >= quota
@classmethod
def count_potential_spammers(cls, days=1):
day_ago = datetime.datetime.now()-datetime.timedelta(days=days)
stories = cls.objects.filter(shared_date__gte=day_ago)
shared = [{'u': s.user_id, 'f': s.story_feed_id} for s in stories]
ddusers = defaultdict(lambda: defaultdict(int))
for story in shared:
ddusers[story['u']][story['f']] += 1
users = {}
for user_id, feeds in ddusers.items():
users[user_id] = dict(feeds)
pprint(users)
return users
@classmethod
def get_shared_stories_from_site(cls, feed_id, user_id, story_url, limit=3):
your_story = cls.objects.filter(story_feed_id=feed_id,
story_permalink=story_url,
user_id=user_id).limit(1).first()
same_stories = cls.objects.filter(story_feed_id=feed_id,
story_permalink=story_url,
user_id__ne=user_id
).order_by('-shared_date')
same_stories = [{
"user_id": story.user_id,
"comments": story.comments,
"relative_date": relative_timesince(story.shared_date),
"blurblog_permalink": story.blurblog_permalink(),
} for story in same_stories]
other_stories = []
if feed_id:
other_stories = cls.objects.filter(story_feed_id=feed_id,
story_permalink__ne=story_url
).order_by('-shared_date').limit(limit)
other_stories = [{
"user_id": story.user_id,
"story_title": story.story_title,
"story_permalink": story.story_permalink,
"comments": story.comments,
"relative_date": relative_timesince(story.shared_date),
"blurblog_permalink": story.blurblog_permalink(),
} for story in other_stories]
return your_story, same_stories, other_stories
def set_source_user_id(self, source_user_id):
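        # Walks up the reshare chain (guarding against cycles) so credit goes to the
        # original sharer, then records a reshare interaction for that user.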
if source_user_id == self.user_id:
return
def find_source(source_user_id, seen_user_ids):
parent_shared_story = MSharedStory.objects.filter(user_id=source_user_id,
story_guid=self.story_guid,
story_feed_id=self.story_feed_id).limit(1)
if parent_shared_story and parent_shared_story[0].source_user_id:
user_id = parent_shared_story[0].source_user_id
if user_id in seen_user_ids:
return source_user_id
else:
seen_user_ids.append(user_id)
return find_source(user_id, seen_user_ids)
else:
return source_user_id
if source_user_id:
source_user_id = find_source(source_user_id, [])
if source_user_id == self.user_id:
return
elif not self.source_user_id or source_user_id != self.source_user_id:
self.source_user_id = source_user_id
logging.debug(" ---> Re-share from %s." % source_user_id)
self.save()
MInteraction.new_reshared_story(user_id=self.source_user_id,
reshare_user_id=self.user_id,
comments=self.comments,
story_title=self.story_title,
story_feed_id=self.story_feed_id,
story_id=self.story_guid)
def mute_for_user(self, user_id):
if user_id not in self.mute_email_users:
self.mute_email_users.append(user_id)
self.save()
@classmethod
def switch_feed(cls, original_feed_id, duplicate_feed_id):
shared_stories = cls.objects.filter(story_feed_id=duplicate_feed_id)
logging.info(" ---> %s shared stories" % shared_stories.count())
for story in shared_stories:
story.story_feed_id = original_feed_id
story.save()
@classmethod
def collect_popular_stories(cls, cutoff=None, days=None, shared_feed_ids=None):
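        # Map-reduce over recent shares: emit one doc per story_hash, sum the share
        # counts, and keep stories shared at least `cutoff` times whose feeds are not
        # in shared_feed_ids (and whose titles aren't almost entirely non-ASCII).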
if not days:
days = 3
if not cutoff:
cutoff = 6
if not shared_feed_ids:
shared_feed_ids = []
# shared_stories_count = sum(json.decode(MStatistics.get('stories_shared')))
# cutoff = cutoff or max(math.floor(.025 * shared_stories_count), 3)
today = datetime.datetime.now() - datetime.timedelta(days=days)
map_f = """
function() {
emit(this.story_hash, {
'story_hash': this.story_hash,
'feed_id': this.story_feed_id,
'title': this.story_title,
'count': 1
});
}
"""
reduce_f = """
function(key, values) {
var r = {'story_hash': key, 'count': 0};
for (var i=0; i < values.length; i++) {
r.feed_id = values[i].feed_id;
r.title = values[i].title;
r.count += values[i].count;
}
return r;
}
"""
finalize_f = """
function(key, value) {
if (value.count >= %(cutoff)s && [%(shared_feed_ids)s].indexOf(value.feed_id) == -1) {
var english_title = value.title.replace(/[^\\062-\\177]/g, "");
if (english_title.length < 5) return;
return value;
}
}
""" % {'cutoff': cutoff, 'shared_feed_ids': ', '.join(shared_feed_ids)}
res = cls.objects(shared_date__gte=today).map_reduce(map_f, reduce_f,
finalize_f=finalize_f,
output='inline')
stories = dict([(r.key, r.value) for r in res if r.value])
return stories, cutoff
@classmethod
def share_popular_stories(cls, cutoff=None, days=None, interactive=True):
publish_new_stories = False
popular_profile = MSocialProfile.objects.get(username='popular')
popular_user = User.objects.get(pk=popular_profile.user_id)
week_ago = datetime.datetime.now() - datetime.timedelta(days=7)
shared_feed_ids = [str(s.story_feed_id)
for s in MSharedStory.objects(user_id=popular_profile.user_id,
shared_date__gte=week_ago).only('story_feed_id')]
shared_stories_today, cutoff = cls.collect_popular_stories(cutoff=cutoff, days=days,
shared_feed_ids=shared_feed_ids)
shared = 0
for story_hash, story_info in shared_stories_today.items():
story, _ = MStory.find_story(story_info['feed_id'], story_info['story_hash'])
if not story:
logging.user(popular_user, "~FRPopular stories, story not found: %s" % story_info)
continue
if story.story_feed_id in shared_feed_ids:
logging.user(popular_user, "~FRPopular stories, story feed just shared: %s" % story_info)
continue
if interactive:
feed = Feed.get_by_id(story.story_feed_id)
accept_story = raw_input("%s / %s [Y/n]: " % (story.decoded_story_title, feed.title))
if accept_story in ['n', 'N']: continue
story_db = dict([(k, v) for k, v in story._data.items()
if k is not None and v is not None])
story_db.pop('user_id', None)
story_db.pop('id', None)
story_db.pop('comments', None)
story_db.pop('replies', None)
story_db['has_comments'] = False
story_db['has_replies'] = False
story_db['shared_date'] = datetime.datetime.now()
story_values = {
'user_id': popular_profile.user_id,
'story_guid': story_db['story_guid'],
'defaults': story_db,
}
shared_story, created = MSharedStory.objects.get_or_create(**story_values)
if created:
shared_story.post_to_service('twitter')
shared += 1
shared_feed_ids.append(story.story_feed_id)
publish_new_stories = True
logging.user(popular_user, "~FCSharing: ~SB~FM%s (%s shares, %s min)" % (
story.decoded_story_title[:50],
story_info['count'],
cutoff))
if publish_new_stories:
socialsubs = MSocialSubscription.objects.filter(subscription_user_id=popular_user.pk)
for socialsub in socialsubs:
socialsub.needs_unread_recalc = True
socialsub.save()
shared_story.publish_update_to_subscribers()
return shared
@staticmethod
def check_shared_story_hashes(user_id, story_hashes, r=None):
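        # Returns the subset of story_hashes this user has shared, checked in one
        # pipelined round-trip against the S:<feed_id>:<guid_hash> share sets.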
if not r:
r = redis.Redis(connection_pool=settings.REDIS_POOL)
pipeline = r.pipeline()
for story_hash in story_hashes:
feed_id, guid_hash = MStory.split_story_hash(story_hash)
share_key = "S:%s:%s" % (feed_id, guid_hash)
pipeline.sismember(share_key, user_id)
shared_hashes = pipeline.execute()
return [story_hash for s, story_hash in enumerate(story_hashes) if shared_hashes[s]]
@classmethod
def sync_all_redis(cls, drop=False):
r = redis.Redis(connection_pool=settings.REDIS_POOL)
h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
# h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
if drop:
for key_name in ["C", "S"]:
keys = r.keys("%s:*" % key_name)
print " ---> Removing %s keys named %s:*" % (len(keys), key_name)
for key in keys:
r.delete(key)
for story in cls.objects.all():
story.sync_redis_shares(r=r)
story.sync_redis_story(r=h)
def sync_redis(self):
self.sync_redis_shares()
self.sync_redis_story()
def sync_redis_shares(self, r=None):
if not r:
r = redis.Redis(connection_pool=settings.REDIS_POOL)
share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash)
comment_key = "C:%s:%s" % (self.story_feed_id, self.guid_hash)
r.sadd(share_key, self.user_id)
if self.has_comments:
r.sadd(comment_key, self.user_id)
else:
r.srem(comment_key, self.user_id)
def sync_redis_story(self, r=None):
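        # Blurblog story hashes live in B:<user_id> (set) and zB:<user_id> (sorted set
        # scored by shared_date) so the river can be ranged by recency; both keys expire
        # with the story-hash retention window.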
if not r:
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
# if not r2:
# r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
r.sadd('B:%s' % self.user_id, self.feed_guid_hash)
# r2.sadd('B:%s' % self.user_id, self.feed_guid_hash)
r.zadd('zB:%s' % self.user_id, self.feed_guid_hash,
time.mktime(self.shared_date.timetuple()))
# r2.zadd('zB:%s' % self.user_id, self.feed_guid_hash,
# time.mktime(self.shared_date.timetuple()))
r.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
# r2.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
r.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
# r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60)
def remove_from_redis(self):
r = redis.Redis(connection_pool=settings.REDIS_POOL)
share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash)
r.srem(share_key, self.user_id)
comment_key = "C:%s:%s" % (self.story_feed_id, self.guid_hash)
r.srem(comment_key, self.user_id)
h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
# h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
h.srem('B:%s' % self.user_id, self.feed_guid_hash)
# h2.srem('B:%s' % self.user_id, self.feed_guid_hash)
h.zrem('zB:%s' % self.user_id, self.feed_guid_hash)
# h2.zrem('zB:%s' % self.user_id, self.feed_guid_hash)
def publish_update_to_subscribers(self):
try:
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
feed_id = "social:%s" % self.user_id
listeners_count = r.publish(feed_id, 'story:new')
if listeners_count:
logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count))
except redis.ConnectionError:
logging.debug(" ***> ~BMRedis is unavailable for real-time.")
def comments_with_author(self):
comments = {
'id': self.id,
'user_id': self.user_id,
'comments': self.comments,
'shared_date': relative_timesince(self.shared_date),
'date': self.shared_date,
'replies': [reply.canonical() for reply in self.replies],
'liking_users': self.liking_users and list(self.liking_users),
'source_user_id': self.source_user_id,
}
return comments
def comment_with_author_and_profiles(self):
comment = self.comments_with_author()
profile_user_ids = set([comment['user_id']])
reply_user_ids = [reply['user_id'] for reply in comment['replies']]
profile_user_ids = profile_user_ids.union(reply_user_ids)
profile_user_ids = profile_user_ids.union(comment['liking_users'])
if comment['source_user_id']:
profile_user_ids.add(comment['source_user_id'])
profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
profiles = [profile.canonical(compact=True) for profile in profiles]
return comment, profiles
@classmethod
def stories_with_comments_and_profiles(cls, stories, user_id, check_all=False):
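        # Decorates each story dict with friend/public comment and share breakdowns
        # (counts and user id lists) pulled from the C:*/S:* redis sets, and returns the
        # compact social profiles needed to render them.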
r = redis.Redis(connection_pool=settings.REDIS_POOL)
friend_key = "F:%s:F" % (user_id)
profile_user_ids = set()
for story in stories:
story['friend_comments'] = []
story['public_comments'] = []
story['reply_count'] = 0
if check_all or story['comment_count']:
comment_key = "C:%s:%s" % (story['story_feed_id'], story['guid_hash'])
story['comment_count'] = r.scard(comment_key)
friends_with_comments = [int(f) for f in r.sinter(comment_key, friend_key)]
sharer_user_ids = [int(f) for f in r.smembers(comment_key)]
shared_stories = []
if sharer_user_ids:
params = {
'story_hash': story['story_hash'],
'user_id__in': sharer_user_ids,
}
shared_stories = cls.objects.filter(**params)
for shared_story in shared_stories:
comments = shared_story.comments_with_author()
story['reply_count'] += len(comments['replies'])
if shared_story.user_id in friends_with_comments:
story['friend_comments'].append(comments)
else:
story['public_comments'].append(comments)
if comments.get('source_user_id'):
profile_user_ids.add(comments['source_user_id'])
if comments.get('liking_users'):
profile_user_ids = profile_user_ids.union(comments['liking_users'])
all_comments = story['friend_comments'] + story['public_comments']
profile_user_ids = profile_user_ids.union([reply['user_id']
for c in all_comments
for reply in c['replies']])
if story.get('source_user_id'):
profile_user_ids.add(story['source_user_id'])
story['comment_count_friends'] = len(friends_with_comments)
story['comment_count_public'] = story['comment_count'] - len(friends_with_comments)
if check_all or story['share_count']:
share_key = "S:%s:%s" % (story['story_feed_id'], story['guid_hash'])
story['share_count'] = r.scard(share_key)
friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]
nonfriend_user_ids = [int(f) for f in r.sdiff(share_key, friend_key)]
profile_user_ids.update(nonfriend_user_ids)
profile_user_ids.update(friends_with_shares)
story['commented_by_public'] = [c['user_id'] for c in story['public_comments']]
story['commented_by_friends'] = [c['user_id'] for c in story['friend_comments']]
story['shared_by_public'] = list(set(nonfriend_user_ids) -
set(story['commented_by_public']))
story['shared_by_friends'] = list(set(friends_with_shares) -
set(story['commented_by_friends']))
story['share_count_public'] = story['share_count'] - len(friends_with_shares)
story['share_count_friends'] = len(friends_with_shares)
story['friend_user_ids'] = list(set(story['commented_by_friends'] + story['shared_by_friends']))
story['public_user_ids'] = list(set(story['commented_by_public'] + story['shared_by_public']))
if not story['share_user_ids']:
story['share_user_ids'] = story['friend_user_ids'] + story['public_user_ids']
if story.get('source_user_id'):
profile_user_ids.add(story['source_user_id'])
profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
profiles = [profile.canonical(compact=True) for profile in profiles]
# Toss public comments by private profiles
profiles_dict = dict((profile['user_id'], profile) for profile in profiles)
for story in stories:
commented_by_public = story.get('commented_by_public') or [c['user_id'] for c in story['public_comments']]
for user_id in commented_by_public:
if profiles_dict[user_id]['private']:
story['public_comments'] = [c for c in story['public_comments'] if c['user_id'] != user_id]
story['comment_count_public'] -= 1
return stories, profiles
@staticmethod
def attach_users_to_stories(stories, profiles):
profiles = dict([(p['user_id'], p) for p in profiles])
for s, story in enumerate(stories):
for u, user_id in enumerate(story['shared_by_friends']):
if user_id not in profiles: continue
stories[s]['shared_by_friends'][u] = profiles[user_id]
for u, user_id in enumerate(story['shared_by_public']):
if user_id not in profiles: continue
stories[s]['shared_by_public'][u] = profiles[user_id]
for comment_set in ['friend_comments', 'public_comments']:
for c, comment in enumerate(story[comment_set]):
if comment['user_id'] not in profiles: continue
stories[s][comment_set][c]['user'] = profiles[comment['user_id']]
if comment['source_user_id'] and comment['source_user_id'] in profiles:
stories[s][comment_set][c]['source_user'] = profiles[comment['source_user_id']]
for r, reply in enumerate(comment['replies']):
if reply['user_id'] not in profiles: continue
stories[s][comment_set][c]['replies'][r]['user'] = profiles[reply['user_id']]
stories[s][comment_set][c]['liking_user_ids'] = list(comment['liking_users'])
for u, user_id in enumerate(comment['liking_users']):
if user_id not in profiles: continue
stories[s][comment_set][c]['liking_users'][u] = profiles[user_id]
return stories
@staticmethod
def attach_users_to_comment(comment, profiles):
profiles = dict([(p['user_id'], p) for p in profiles])
if comment['user_id'] not in profiles: return comment
comment['user'] = profiles[comment['user_id']]
if comment['source_user_id']:
comment['source_user'] = profiles[comment['source_user_id']]
for r, reply in enumerate(comment['replies']):
if reply['user_id'] not in profiles: continue
comment['replies'][r]['user'] = profiles[reply['user_id']]
comment['liking_user_ids'] = list(comment['liking_users'])
for u, user_id in enumerate(comment['liking_users']):
if user_id not in profiles: continue
comment['liking_users'][u] = profiles[user_id]
return comment
def add_liking_user(self, user_id):
if user_id not in self.liking_users:
self.liking_users.append(user_id)
self.save()
def remove_liking_user(self, user_id):
if user_id in self.liking_users:
self.liking_users.remove(user_id)
self.save()
def blurblog_permalink(self):
profile = MSocialProfile.get_user(self.user_id)
return "%s/story/%s/%s" % (
profile.blurblog_url,
slugify(self.story_title)[:20],
self.guid_hash[:6]
)
def generate_post_to_service_message(self, truncate=None, include_url=True):
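        # Builds the cross-post text: the comments (or story title), optionally the feed
        # title, and the blurblog permalink, trimmed to leave room for the URL when a
        # character limit applies (e.g. Twitter's 140).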
message = strip_tags(self.comments)
        if not message:
message = self.decoded_story_title
if include_url and truncate:
message = truncate_chars(message, truncate - 18 - 30)
feed = Feed.get_by_id(self.story_feed_id)
if feed:
if truncate:
message += " (%s)" % truncate_chars(feed.feed_title, 18)
else:
message += " (%s)" % truncate_chars(feed.feed_title, 30)
if include_url:
message += " " + self.blurblog_permalink()
elif include_url:
if truncate:
message = truncate_chars(message, truncate - 14)
message += " " + self.blurblog_permalink()
return message
def post_to_service(self, service):
user = User.objects.get(pk=self.user_id)
if service in self.posted_to_services:
logging.user(user, "~BM~FRAlready posted to %s." % (service))
return
posted = False
social_service = MSocialServices.objects.get(user_id=self.user_id)
message = self.generate_post_to_service_message()
logging.user(user, "~BM~FGPosting to %s: ~SB%s" % (service, message))
if service == 'twitter':
posted = social_service.post_to_twitter(self)
elif service == 'facebook':
posted = social_service.post_to_facebook(self)
elif service == 'appdotnet':
posted = social_service.post_to_appdotnet(self)
if posted:
self.posted_to_services.append(service)
self.save()
def notify_user_ids(self, include_parent=True):
user_ids = set()
for reply in self.replies:
if reply.user_id not in self.mute_email_users:
user_ids.add(reply.user_id)
if include_parent and self.user_id not in self.mute_email_users:
user_ids.add(self.user_id)
return list(user_ids)
def reply_for_id(self, reply_id):
for reply in self.replies:
if reply.reply_id == reply_id:
return reply
def send_emails_for_new_reply(self, reply_id):
if reply_id in self.emailed_replies:
logging.debug(" ***> Already sent reply email: %s on %s" % (reply_id, self))
return
reply = self.reply_for_id(reply_id)
if not reply:
logging.debug(" ***> Reply doesn't exist: %s on %s" % (reply_id, self))
return
notify_user_ids = self.notify_user_ids()
if reply.user_id in notify_user_ids:
notify_user_ids.remove(reply.user_id)
reply_user = User.objects.get(pk=reply.user_id)
reply_user_profile = MSocialProfile.get_user(reply.user_id)
sent_emails = 0
story_feed = Feed.get_by_id(self.story_feed_id)
comment = self.comments_with_author()
profile_user_ids = set([comment['user_id']])
reply_user_ids = list(r['user_id'] for r in comment['replies'])
profile_user_ids = profile_user_ids.union(reply_user_ids)
if self.source_user_id:
profile_user_ids.add(self.source_user_id)
profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
profiles = [profile.canonical(compact=True) for profile in profiles]
comment = MSharedStory.attach_users_to_comment(comment, profiles)
for user_id in notify_user_ids:
user = User.objects.get(pk=user_id)
if not user.email or not user.profile.send_emails:
if not user.email:
logging.user(user, "~FMNo email to send to, skipping.")
elif not user.profile.send_emails:
logging.user(user, "~FMDisabled emails, skipping.")
continue
mute_url = "http://%s%s" % (
Site.objects.get_current().domain,
reverse('social-mute-story', kwargs={
'secret_token': user.profile.secret_token,
'shared_story_id': self.id,
})
)
data = {
'reply_user_profile': reply_user_profile,
'comment': comment,
'shared_story': self,
'story_feed': story_feed,
'mute_url': mute_url,
}
story_title = self.decoded_story_title.replace('\n', ' ')
text = render_to_string('mail/email_reply.txt', data)
html = pynliner.fromString(render_to_string('mail/email_reply.xhtml', data))
subject = "%s replied to you on \"%s\" on NewsBlur" % (reply_user.username, story_title)
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
to=['%s <%s>' % (user.username, user.email)])
msg.attach_alternative(html, "text/html")
msg.send()
sent_emails += 1
logging.user(reply_user, "~BB~FM~SBSending %s/%s email%s for new reply: %s" % (
sent_emails, len(notify_user_ids),
'' if len(notify_user_ids) == 1 else 's',
self.decoded_story_title[:30]))
self.emailed_replies.append(reply.reply_id)
self.save()
def send_email_for_reshare(self):
if self.emailed_reshare:
logging.debug(" ***> Already sent reply email: %s" % self)
return
reshare_user = User.objects.get(pk=self.user_id)
reshare_user_profile = MSocialProfile.get_user(self.user_id)
original_user = User.objects.get(pk=self.source_user_id)
original_shared_story = MSharedStory.objects.get(user_id=self.source_user_id,
story_guid=self.story_guid)
if not original_user.email or not original_user.profile.send_emails:
if not original_user.email:
logging.user(original_user, "~FMNo email to send to, skipping.")
elif not original_user.profile.send_emails:
logging.user(original_user, "~FMDisabled emails, skipping.")
return
story_feed = Feed.get_by_id(self.story_feed_id)
comment = self.comments_with_author()
profile_user_ids = set([comment['user_id']])
reply_user_ids = [reply['user_id'] for reply in comment['replies']]
profile_user_ids = profile_user_ids.union(reply_user_ids)
if self.source_user_id:
profile_user_ids.add(self.source_user_id)
profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
profiles = [profile.canonical(compact=True) for profile in profiles]
comment = MSharedStory.attach_users_to_comment(comment, profiles)
mute_url = "http://%s%s" % (
Site.objects.get_current().domain,
reverse('social-mute-story', kwargs={
'secret_token': original_user.profile.secret_token,
'shared_story_id': original_shared_story.id,
})
)
data = {
'comment': comment,
'shared_story': self,
'reshare_user_profile': reshare_user_profile,
'original_shared_story': original_shared_story,
'story_feed': story_feed,
'mute_url': mute_url,
}
story_title = self.decoded_story_title.replace('\n', ' ')
text = render_to_string('mail/email_reshare.txt', data)
html = pynliner.fromString(render_to_string('mail/email_reshare.xhtml', data))
subject = "%s re-shared \"%s\" from you on NewsBlur" % (reshare_user.username, story_title)
msg = EmailMultiAlternatives(subject, text,
from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
to=['%s <%s>' % (original_user.username, original_user.email)])
msg.attach_alternative(html, "text/html")
msg.send()
self.emailed_reshare = True
self.save()
logging.user(reshare_user, "~BB~FM~SBSending %s email for story re-share: %s" % (
original_user.username,
self.decoded_story_title[:30]))
def calculate_image_sizes(self, force=False):
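        # Finds the largest usable images in the shared story's content by fetching only
        # the first bytes of each image to read its dimensions; results are cached on the
        # document in image_sizes/image_count.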
if not self.story_content_z:
return
if not force and self.image_count:
return self.image_sizes
headers = {
'User-Agent': 'NewsBlur Image Fetcher - %s '
'(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) '
'AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 '
'Safari/534.48.3)' % (
settings.NEWSBLUR_URL
),
}
soup = BeautifulSoup(zlib.decompress(self.story_content_z))
        image_sources = [img.get('src') for img in soup.findAll('img') if img.get('src')]
image_sizes = []
for image_source in image_sources[:10]:
if any(ignore in image_source for ignore in IGNORE_IMAGE_SOURCES):
continue
req = requests.get(image_source, headers=headers, stream=True)
datastream = StringIO(req.content[:30])
_, width, height = image_size(datastream)
if width <= 16 or height <= 16:
continue
image_sizes.append({'src': image_source, 'size': (width, height)})
if image_sizes:
image_sizes = sorted(image_sizes, key=lambda i: i['size'][0] * i['size'][1],
reverse=True)
self.image_sizes = image_sizes
self.image_count = len(image_sizes)
self.save()
logging.debug(" ---> ~SN~FGFetched image sizes on shared story: ~SB%s images" % self.image_count)
return image_sizes
def fetch_original_text(self, force=False, request=None):
original_text_z = self.original_text_z
feed = Feed.get_by_id(self.story_feed_id)
if not original_text_z or force:
ti = TextImporter(self, feed, request=request)
original_text = ti.fetch()
else:
logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.")
original_text = zlib.decompress(original_text_z)
return original_text
class MSocialServices(mongo.Document):
user_id = mongo.IntField()
autofollow = mongo.BooleanField(default=True)
twitter_uid = mongo.StringField()
twitter_access_key = mongo.StringField()
twitter_access_secret = mongo.StringField()
twitter_friend_ids = mongo.ListField(mongo.StringField())
twitter_picture_url = mongo.StringField()
twitter_username = mongo.StringField()
twitter_refresh_date = mongo.DateTimeField()
facebook_uid = mongo.StringField()
facebook_access_token = mongo.StringField()
facebook_friend_ids = mongo.ListField(mongo.StringField())
facebook_picture_url = mongo.StringField()
facebook_refresh_date = mongo.DateTimeField()
appdotnet_uid = mongo.StringField()
appdotnet_access_token= mongo.StringField()
appdotnet_friend_ids = mongo.ListField(mongo.StringField())
appdotnet_picture_url = mongo.StringField()
appdotnet_refresh_date= mongo.DateTimeField()
upload_picture_url = mongo.StringField()
syncing_twitter = mongo.BooleanField(default=False)
syncing_facebook = mongo.BooleanField(default=False)
syncing_appdotnet = mongo.BooleanField(default=False)
meta = {
'collection': 'social_services',
'indexes': ['user_id', 'twitter_friend_ids', 'facebook_friend_ids', 'twitter_uid', 'facebook_uid', 'appdotnet_uid'],
'allow_inheritance': False,
}
def __unicode__(self):
user = User.objects.get(pk=self.user_id)
return "%s (Twitter: %s, FB: %s, ADN: %s)" % (user.username, self.twitter_uid, self.facebook_uid, self.appdotnet_uid)
def canonical(self):
user = User.objects.get(pk=self.user_id)
return {
'twitter': {
'twitter_username': self.twitter_username,
'twitter_picture_url': self.twitter_picture_url,
'twitter_uid': self.twitter_uid,
'syncing': self.syncing_twitter,
},
'facebook': {
'facebook_uid': self.facebook_uid,
'facebook_picture_url': self.facebook_picture_url,
'syncing': self.syncing_facebook,
},
'appdotnet': {
'appdotnet_uid': self.appdotnet_uid,
'appdotnet_picture_url': self.appdotnet_picture_url,
'syncing': self.syncing_appdotnet,
},
'gravatar': {
'gravatar_picture_url': "https://www.gravatar.com/avatar/" + \
hashlib.md5(user.email.lower()).hexdigest()
},
'upload': {
'upload_picture_url': self.upload_picture_url
}
}
@classmethod
def get_user(cls, user_id):
try:
profile, created = cls.objects.get_or_create(user_id=user_id)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(user_id=user_id)
logging.debug(" ---> ~FRDeleting dupe social services. %s found." % dupes.count())
for dupe in dupes[1:]:
dupe.delete()
profile = dupes[0]
created = False
if created:
profile.save()
return profile
@classmethod
def profile(cls, user_id):
profile = cls.get_user(user_id=user_id)
return profile.canonical()
def save_uploaded_photo(self, photo):
photo_body = photo.read()
filename = photo.name
s3 = s3_utils.S3Store()
image_name = s3.save_profile_picture(self.user_id, filename, photo_body)
if image_name:
self.upload_picture_url = "https://s3.amazonaws.com/%s/avatars/%s/thumbnail_%s" % (
settings.S3_AVATARS_BUCKET_NAME,
self.user_id,
image_name,
)
self.save()
return image_name and self.upload_picture_url
def twitter_api(self):
twitter_consumer_key = settings.TWITTER_CONSUMER_KEY
twitter_consumer_secret = settings.TWITTER_CONSUMER_SECRET
auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
auth.set_access_token(self.twitter_access_key, self.twitter_access_secret)
api = tweepy.API(auth)
return api
def facebook_api(self):
graph = facebook.GraphAPI(self.facebook_access_token)
return graph
def appdotnet_api(self):
adn_api = appdotnet.Appdotnet(access_token=self.appdotnet_access_token)
return adn_api
def sync_twitter_friends(self):
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BG~FMTwitter import starting...")
api = self.twitter_api()
if not api:
logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no api access.")
self.syncing_twitter = False
self.save()
return
twitter_user = api.me()
self.twitter_picture_url = twitter_user.profile_image_url_https
self.twitter_username = twitter_user.screen_name
        self.twitter_refresh_date = datetime.datetime.utcnow()
self.syncing_twitter = False
self.save()
profile = MSocialProfile.get_user(self.user_id)
profile.location = profile.location or twitter_user.location
profile.bio = profile.bio or twitter_user.description
profile.website = profile.website or twitter_user.url
profile.save()
profile.count_follows()
if not profile.photo_url or not profile.photo_service:
self.set_photo('twitter')
try:
friend_ids = list(unicode(friend.id) for friend in tweepy.Cursor(api.friends).items())
except tweepy.TweepError, e:
logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: %s" % e)
return
if not friend_ids:
logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no friend_ids.")
self.twitter_friend_ids = friend_ids
self.save()
following = self.follow_twitter_friends()
if not following:
logging.user(user, "~BG~FMTwitter import finished.")
def follow_twitter_friends(self):
social_profile = MSocialProfile.get_user(self.user_id)
following = []
followers = 0
if not self.autofollow:
return following
# Follow any friends already on NewsBlur
user_social_services = MSocialServices.objects.filter(twitter_uid__in=self.twitter_friend_ids)
for user_social_service in user_social_services:
followee_user_id = user_social_service.user_id
socialsub = social_profile.follow_user(followee_user_id)
if socialsub:
following.append(followee_user_id)
# Friends already on NewsBlur should follow back
# following_users = MSocialServices.objects.filter(twitter_friend_ids__contains=self.twitter_uid)
# for following_user in following_users:
# if following_user.autofollow:
# following_user_profile = MSocialProfile.get_user(following_user.user_id)
# following_user_profile.follow_user(self.user_id, check_unfollowed=True)
# followers += 1
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BG~FMTwitter import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.twitter_friend_ids), len(following), followers))
return following
def sync_facebook_friends(self):
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BG~FMFacebook import starting...")
graph = self.facebook_api()
if not graph:
logging.user(user, "~BG~FMFacebook import ~SBfailed~SN: no api access.")
self.syncing_facebook = False
self.save()
return
friends = graph.get_connections("me", "friends")
if not friends:
logging.user(user, "~BG~FMFacebook import ~SBfailed~SN: no friend_ids.")
self.syncing_facebook = False
self.save()
return
facebook_friend_ids = [unicode(friend["id"]) for friend in friends["data"]]
self.facebook_friend_ids = facebook_friend_ids
self.facebook_refresh_date = datetime.datetime.utcnow()
self.facebook_picture_url = "//graph.facebook.com/%s/picture" % self.facebook_uid
self.syncing_facebook = False
self.save()
facebook_user = graph.request('me', args={'fields':'website,bio,location'})
profile = MSocialProfile.get_user(self.user_id)
profile.location = profile.location or (facebook_user.get('location') and facebook_user['location']['name'])
profile.bio = profile.bio or facebook_user.get('bio')
if not profile.website and facebook_user.get('website'):
profile.website = facebook_user.get('website').split()[0]
profile.save()
profile.count_follows()
if not profile.photo_url or not profile.photo_service:
self.set_photo('facebook')
self.follow_facebook_friends()
def follow_facebook_friends(self):
social_profile = MSocialProfile.get_user(self.user_id)
following = []
followers = 0
if not self.autofollow:
return following
# Follow any friends already on NewsBlur
user_social_services = MSocialServices.objects.filter(facebook_uid__in=self.facebook_friend_ids)
for user_social_service in user_social_services:
followee_user_id = user_social_service.user_id
socialsub = social_profile.follow_user(followee_user_id)
if socialsub:
following.append(followee_user_id)
# Friends already on NewsBlur should follow back
# following_users = MSocialServices.objects.filter(facebook_friend_ids__contains=self.facebook_uid)
# for following_user in following_users:
# if following_user.autofollow:
# following_user_profile = MSocialProfile.get_user(following_user.user_id)
# following_user_profile.follow_user(self.user_id, check_unfollowed=True)
# followers += 1
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BG~FMFacebook import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.facebook_friend_ids), len(following), followers))
return following
def sync_appdotnet_friends(self):
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BG~FMApp.net import starting...")
api = self.appdotnet_api()
if not api:
logging.user(user, "~BG~FMApp.net import ~SBfailed~SN: no api access.")
self.syncing_appdotnet = False
self.save()
return
friend_ids = []
has_more_friends = True
before_id = None
since_id = None
while has_more_friends:
friends_resp = api.getUserFollowingIds(self.appdotnet_uid,
before_id=before_id,
since_id=since_id)
friends = json.decode(friends_resp)
before_id = friends['meta'].get('min_id')
since_id = friends['meta'].get('max_id')
has_more_friends = friends['meta'].get('more')
friend_ids.extend([fid for fid in friends['data']])
if not friend_ids:
logging.user(user, "~BG~FMApp.net import ~SBfailed~SN: no friend_ids.")
self.syncing_appdotnet = False
self.save()
return
adn_user = json.decode(api.getUser(self.appdotnet_uid))['data']
self.appdotnet_picture_url = adn_user['avatar_image']['url']
self.appdotnet_username = adn_user['username']
self.appdotnet_friend_ids = friend_ids
        self.appdotnet_refresh_date = datetime.datetime.utcnow()
self.syncing_appdotnet = False
self.save()
profile = MSocialProfile.get_user(self.user_id)
profile.bio = profile.bio or adn_user['description']['text']
profile.save()
profile.count_follows()
if not profile.photo_url or not profile.photo_service:
self.set_photo('appdotnet')
self.follow_appdotnet_friends()
def follow_appdotnet_friends(self):
social_profile = MSocialProfile.get_user(self.user_id)
following = []
followers = 0
if not self.autofollow:
return following
# Follow any friends already on NewsBlur
user_social_services = MSocialServices.objects.filter(appdotnet_uid__in=self.appdotnet_friend_ids)
for user_social_service in user_social_services:
followee_user_id = user_social_service.user_id
socialsub = social_profile.follow_user(followee_user_id)
if socialsub:
following.append(followee_user_id)
# Friends already on NewsBlur should follow back
# following_users = MSocialServices.objects.filter(appdotnet_friend_ids__contains=self.appdotnet_uid)
# for following_user in following_users:
# if following_user.autofollow:
# following_user_profile = MSocialProfile.get_user(following_user.user_id)
# following_user_profile.follow_user(self.user_id, check_unfollowed=True)
# followers += 1
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BG~FMApp.net import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.appdotnet_friend_ids), len(following), followers))
return following
def disconnect_twitter(self):
self.twitter_uid = None
self.save()
def disconnect_facebook(self):
self.facebook_uid = None
self.save()
def disconnect_appdotnet(self):
self.appdotnet_uid = None
self.save()
def set_photo(self, service):
profile = MSocialProfile.get_user(self.user_id)
if service == 'nothing':
service = None
profile.photo_service = service
if not service:
profile.photo_url = None
elif service == 'twitter':
profile.photo_url = self.twitter_picture_url
elif service == 'facebook':
profile.photo_url = self.facebook_picture_url
elif service == 'upload':
profile.photo_url = self.upload_picture_url
elif service == 'gravatar':
user = User.objects.get(pk=self.user_id)
profile.photo_url = "https://www.gravatar.com/avatar/" + \
hashlib.md5(user.email).hexdigest()
profile.save()
return profile
@classmethod
def sync_all_twitter_photos(cls, days=14):
week_ago = datetime.datetime.now() - datetime.timedelta(days=days)
shares = MSharedStory.objects.filter(shared_date__gte=week_ago)
sharers = sorted(set([s.user_id for s in shares]))
print " ---> %s sharing user_ids" % len(sorted(sharers))
for user_id in sharers:
profile = MSocialProfile.objects.get(user_id=user_id)
if not profile.photo_service == 'twitter': continue
ss = MSocialServices.objects.get(user_id=user_id)
try:
ss.sync_twitter_photo()
print " ---> Syncing %s" % user_id
except Exception, e:
print " ***> Exception on %s: %s" % (user_id, e)
def sync_twitter_photo(self):
profile = MSocialProfile.get_user(self.user_id)
if profile.photo_service != "twitter":
return
user = User.objects.get(pk=self.user_id)
logging.user(user, "~FCSyncing Twitter profile photo...")
try:
api = self.twitter_api()
me = api.me()
except tweepy.TweepError, e:
logging.user(user, "~FRException (%s): ~FCsetting to blank profile photo" % e)
self.twitter_picture_url = None
self.set_photo("nothing")
return
self.twitter_picture_url = me.profile_image_url_https
self.save()
self.set_photo('twitter')
def post_to_twitter(self, shared_story):
message = shared_story.generate_post_to_service_message(truncate=140)
try:
api = self.twitter_api()
api.update_status(status=message)
except tweepy.TweepError, e:
print e
return
return True
def post_to_facebook(self, shared_story):
message = shared_story.generate_post_to_service_message(include_url=False)
shared_story.calculate_image_sizes()
content = zlib.decompress(shared_story.story_content_z)[:1024]
try:
api = self.facebook_api()
# api.put_wall_post(message=message)
api.put_object('me', '%s:share' % settings.FACEBOOK_NAMESPACE,
link=shared_story.blurblog_permalink(),
type="link",
name=shared_story.decoded_story_title,
description=content,
website=shared_story.blurblog_permalink(),
message=message,
)
except facebook.GraphAPIError, e:
print e
return
return True
def post_to_appdotnet(self, shared_story):
message = shared_story.generate_post_to_service_message(truncate=256)
try:
api = self.appdotnet_api()
api.createPost(text=message, links=[{
'text': shared_story.decoded_story_title,
'url': shared_story.blurblog_permalink()
}])
except Exception, e:
print e
return
return True
class MInteraction(mongo.Document):
user_id = mongo.IntField()
date = mongo.DateTimeField(default=datetime.datetime.now)
category = mongo.StringField()
title = mongo.StringField()
content = mongo.StringField()
with_user_id = mongo.IntField()
feed_id = mongo.DynamicField()
story_feed_id= mongo.IntField()
content_id = mongo.StringField()
meta = {
'collection': 'interactions',
'indexes': [('user_id', '-date'), 'category', 'with_user_id'],
'allow_inheritance': False,
'index_drop_dups': True,
'ordering': ['-date'],
}
def __unicode__(self):
user = User.objects.get(pk=self.user_id)
with_user = self.with_user_id and User.objects.get(pk=self.with_user_id)
return "<%s> %s on %s: %s - %s" % (user.username, with_user and with_user.username, self.date,
self.category, self.content and self.content[:20])
def canonical(self):
return {
'date': self.date,
'category': self.category,
'title': self.title,
'content': self.content,
'with_user_id': self.with_user_id,
'feed_id': self.feed_id,
'story_feed_id': self.story_feed_id,
'content_id': self.content_id,
}
@classmethod
def publish_update_to_subscribers(self, user_id):
user = User.objects.get(pk=user_id)
try:
r = redis.Redis(connection_pool=settings.REDIS_POOL)
listeners_count = r.publish(user.username, 'interaction:new')
if listeners_count:
logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count))
except redis.ConnectionError:
logging.debug(" ***> ~BMRedis is unavailable for real-time.")
@classmethod
def user(cls, user_id, page=1, limit=None, categories=None):
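        # Paginated interactions for the dashboard; items newer than the user's
        # dashboard_date are flagged is_new.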
user_profile = Profile.objects.get(user=user_id)
dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
page = max(1, page)
limit = int(limit) if limit else 4
offset = (page-1) * limit
interactions_db = cls.objects.filter(user_id=user_id)
if categories:
interactions_db = interactions_db.filter(category__in=categories)
interactions_db = interactions_db[offset:offset+limit+1]
has_next_page = len(interactions_db) > limit
interactions_db = interactions_db[offset:offset+limit]
with_user_ids = [i.with_user_id for i in interactions_db if i.with_user_id]
social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids))
interactions = []
for interaction_db in interactions_db:
interaction = interaction_db.canonical()
social_profile = social_profiles.get(interaction_db.with_user_id)
if social_profile:
interaction['photo_url'] = social_profile.profile_photo_url
interaction['with_user'] = social_profiles.get(interaction_db.with_user_id)
interaction['time_since'] = relative_timesince(interaction_db.date)
interaction['date'] = interaction_db.date
interaction['is_new'] = interaction_db.date > dashboard_date
interactions.append(interaction)
return interactions, has_next_page
@classmethod
def user_unread_count(cls, user_id):
user_profile = Profile.objects.get(user=user_id)
dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
interactions_count = cls.objects.filter(user_id=user_id, date__gte=dashboard_date).count()
return interactions_count
@classmethod
def new_follow(cls, follower_user_id, followee_user_id):
params = {
'user_id': followee_user_id,
'with_user_id': follower_user_id,
'category': 'follow',
}
try:
cls.objects.get_or_create(**params)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**params).order_by('-date')
logging.debug(" ---> ~FRDeleting dupe follow interactions. %s found." % dupes.count())
for dupe in dupes[1:]:
dupe.delete()
cls.publish_update_to_subscribers(followee_user_id)
@classmethod
def new_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None):
params = {
'user_id': user_id,
'with_user_id': reply_user_id,
'category': 'comment_reply',
'content': linkify(strip_tags(reply_content)),
'feed_id': "social:%s" % user_id,
'story_feed_id': story_feed_id,
'title': story_title,
'content_id': story_id,
}
if original_message:
params['content'] = original_message
original = cls.objects.filter(**params).limit(1)
if original:
original = original[0]
original.content = linkify(strip_tags(reply_content))
original.save()
else:
original_message = None
if not original_message:
cls.objects.create(**params)
cls.publish_update_to_subscribers(user_id)
@classmethod
def remove_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, story_feed_id):
params = {
'user_id': user_id,
'with_user_id': reply_user_id,
'category': 'comment_reply',
'content': linkify(strip_tags(reply_content)),
'feed_id': "social:%s" % user_id,
'story_feed_id': story_feed_id,
'content_id': story_id,
}
original = cls.objects.filter(**params)
original.delete()
cls.publish_update_to_subscribers(user_id)
@classmethod
def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_title, comments):
cls.objects.get_or_create(user_id=comment_user_id,
with_user_id=liking_user_id,
category="comment_like",
feed_id="social:%s" % comment_user_id,
content_id=story_id,
defaults={
"title": story_title,
"content": comments,
})
cls.publish_update_to_subscribers(comment_user_id)
@classmethod
def new_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None):
params = {
'user_id': user_id,
'with_user_id': reply_user_id,
'category': 'reply_reply',
'content': linkify(strip_tags(reply_content)),
'feed_id': "social:%s" % comment_user_id,
'story_feed_id': story_feed_id,
'title': story_title,
'content_id': story_id,
}
if original_message:
params['content'] = original_message
original = cls.objects.filter(**params).limit(1)
if original:
original = original[0]
original.content = reply_content
original.save()
else:
original_message = None
if not original_message:
cls.objects.create(**params)
cls.publish_update_to_subscribers(user_id)
@classmethod
def remove_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id):
params = {
'user_id': user_id,
'with_user_id': reply_user_id,
'category': 'reply_reply',
'content': linkify(strip_tags(reply_content)),
'feed_id': "social:%s" % comment_user_id,
'story_feed_id': story_feed_id,
'content_id': story_id,
}
original = cls.objects.filter(**params)
original.delete()
cls.publish_update_to_subscribers(user_id)
@classmethod
def new_reshared_story(cls, user_id, reshare_user_id, comments, story_title, story_feed_id, story_id, original_comments=None):
params = {
'user_id': user_id,
'with_user_id': reshare_user_id,
'category': 'story_reshare',
'content': comments,
'title': story_title,
'feed_id': "social:%s" % reshare_user_id,
'story_feed_id': story_feed_id,
'content_id': story_id,
}
if original_comments:
params['content'] = original_comments
original = cls.objects.filter(**params).limit(1)
if original:
interaction = original[0]
interaction.content = comments
interaction.save()
else:
original_comments = None
if not original_comments:
cls.objects.create(**params)
cls.publish_update_to_subscribers(user_id)
class MActivity(mongo.Document):
user_id = mongo.IntField()
date = mongo.DateTimeField(default=datetime.datetime.now)
category = mongo.StringField()
title = mongo.StringField()
content = mongo.StringField()
with_user_id = mongo.IntField()
feed_id = mongo.DynamicField()
story_feed_id= mongo.IntField()
content_id = mongo.StringField()
meta = {
'collection': 'activities',
'indexes': [('user_id', '-date'), 'category', 'with_user_id'],
'allow_inheritance': False,
'index_drop_dups': True,
'ordering': ['-date'],
}
def __unicode__(self):
user = User.objects.get(pk=self.user_id)
return "<%s> %s - %s" % (user.username, self.category, self.content and self.content[:20])
def canonical(self):
return {
'date': self.date,
'category': self.category,
'title': self.title,
'content': self.content,
'user_id': self.user_id,
'with_user_id': self.with_user_id or self.user_id,
'feed_id': self.feed_id or self.story_feed_id,
'story_feed_id': self.story_feed_id or self.feed_id,
'content_id': self.content_id,
}
@classmethod
def user(cls, user_id, page=1, limit=4, public=False, categories=None):
user_profile = Profile.objects.get(user=user_id)
dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
page = max(1, page)
limit = int(limit)
offset = (page-1) * limit
activities_db = cls.objects.filter(user_id=user_id)
if categories:
activities_db = activities_db.filter(category__in=categories)
if public:
activities_db = activities_db.filter(category__nin=['star', 'feedsub'])
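        # Fetch one extra row beyond the page size so has_next_page can be
        # detected, then trim the results back down to the requested limit.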
activities_db = activities_db[offset:offset+limit+1]
has_next_page = len(activities_db) > limit
activities_db = activities_db[offset:offset+limit]
with_user_ids = [a.with_user_id for a in activities_db if a.with_user_id]
social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids))
activities = []
for activity_db in activities_db:
activity = activity_db.canonical()
activity['date'] = activity_db.date
activity['time_since'] = relative_timesince(activity_db.date)
social_profile = social_profiles.get(activity_db.with_user_id)
if social_profile:
activity['photo_url'] = social_profile.profile_photo_url
activity['is_new'] = activity_db.date > dashboard_date
activity['with_user'] = social_profiles.get(activity_db.with_user_id or activity_db.user_id)
activities.append(activity)
return activities, has_next_page
@classmethod
def new_starred_story(cls, user_id, story_title, story_feed_id, story_id):
cls.objects.get_or_create(user_id=user_id,
category='star',
story_feed_id=story_feed_id,
content_id=story_id,
defaults=dict(content=story_title))
@classmethod
def remove_starred_story(cls, user_id, story_feed_id, story_id):
params = {
'user_id': user_id,
'category': 'star',
'story_feed_id': story_feed_id,
'content_id': story_id,
}
original = cls.objects.filter(**params)
original.delete()
@classmethod
def new_feed_subscription(cls, user_id, feed_id, feed_title):
params = {
"user_id": user_id,
"category": 'feedsub',
"feed_id": feed_id,
}
try:
cls.objects.get_or_create(defaults=dict(content=feed_title), **params)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**params).order_by('-date')
logging.debug(" ---> ~FRDeleting dupe feed subscription activities. %s found." % dupes.count())
for dupe in dupes[1:]:
dupe.delete()
@classmethod
def new_follow(cls, follower_user_id, followee_user_id):
params = {
'user_id': follower_user_id,
'with_user_id': followee_user_id,
'category': 'follow',
}
try:
cls.objects.get_or_create(**params)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**params).order_by('-date')
logging.debug(" ---> ~FRDeleting dupe follow activities. %s found." % dupes.count())
for dupe in dupes[1:]:
dupe.delete()
@classmethod
def new_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None):
params = {
'user_id': user_id,
'with_user_id': comment_user_id,
'category': 'comment_reply',
'content': linkify(strip_tags(reply_content)),
'feed_id': "social:%s" % comment_user_id,
'story_feed_id': story_feed_id,
'title': story_title,
'content_id': story_id,
}
if original_message:
params['content'] = original_message
original = cls.objects.filter(**params).limit(1)
if original:
original = original[0]
original.content = linkify(strip_tags(reply_content))
original.save()
else:
original_message = None
if not original_message:
cls.objects.create(**params)
@classmethod
def remove_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id):
params = {
'user_id': user_id,
'with_user_id': comment_user_id,
'category': 'comment_reply',
'content': linkify(strip_tags(reply_content)),
'feed_id': "social:%s" % comment_user_id,
'story_feed_id': story_feed_id,
'content_id': story_id,
}
original = cls.objects.filter(**params)
original.delete()
@classmethod
def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_title, comments):
cls.objects.get_or_create(user_id=liking_user_id,
with_user_id=comment_user_id,
category="comment_like",
feed_id="social:%s" % comment_user_id,
content_id=story_id,
defaults={
"title": story_title,
"content": comments,
})
@classmethod
def new_shared_story(cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None):
data = {
"user_id": user_id,
"category": 'sharedstory',
"feed_id": "social:%s" % user_id,
"story_feed_id": story_feed_id,
"content_id": story_id,
}
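        # Reuse an existing share activity when one matches; otherwise create it.
        # Duplicates are collapsed and the mutable fields (comments, sharer,
        # share date) are refreshed below.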
try:
a, _ = cls.objects.get_or_create(defaults={
'with_user_id': source_user_id,
'title': story_title,
'content': comments,
}, **data)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**data)
logging.debug(" ---> ~FRDeleting dupe shared story activities. %s found." % dupes.count())
a = dupes[0]
for dupe in dupes[1:]:
dupe.delete()
if a.content != comments:
a.content = comments
a.save()
if source_user_id and a.with_user_id != source_user_id:
a.source_user_id = source_user_id
a.save()
if share_date:
a.date = share_date
a.save()
@classmethod
def remove_shared_story(cls, user_id, story_feed_id, story_id):
params = dict(user_id=user_id,
category='sharedstory',
feed_id="social:%s" % user_id,
story_feed_id=story_feed_id,
content_id=story_id)
try:
a = cls.objects.get(**params)
except cls.DoesNotExist:
return
except cls.MultipleObjectsReturned:
a = cls.objects.filter(**params)
a.delete()
@classmethod
def new_signup(cls, user_id):
cls.objects.get_or_create(user_id=user_id,
with_user_id=user_id,
category="signup")
class MFollowRequest(mongo.Document):
follower_user_id = mongo.IntField(unique_with='followee_user_id')
followee_user_id = mongo.IntField()
date = mongo.DateTimeField(default=datetime.datetime.now)
meta = {
'collection': 'follow_request',
'indexes': ['follower_user_id', 'followee_user_id'],
'ordering': ['-date'],
'allow_inheritance': False,
'index_drop_dups': True,
}
@classmethod
def add(cls, follower_user_id, followee_user_id):
cls.objects.get_or_create(follower_user_id=follower_user_id,
followee_user_id=followee_user_id)
@classmethod
def remove(cls, follower_user_id, followee_user_id):
cls.objects.filter(follower_user_id=follower_user_id,
followee_user_id=followee_user_id).delete()
|
mit
| -3,288,628,194,197,878,000 | 42.804901 | 195 | 0.551655 | false | 3.929846 | false | false | false |
tsdotca/dmclient
|
core/hacks.py
|
1
|
2964
|
# core/hacks.py
# Copyright (C) 2018 Alex Mair. All rights reserved.
# This file is part of dmclient.
#
# dmclient is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# dmclient is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dmclient. If not, see <http://www.gnu.org/licenses/>.
#
"""This module provides dirty hacks to make PyQt more pleasant to work with.
.. todo::
These should only be around when ``__debug__`` is turned on
"""
from PyQt5.QtCore import QDate
from PyQt5.QtCore import QDateTime
from PyQt5.QtCore import QItemSelection
from PyQt5.QtCore import QModelIndex
# If set to true, things like QModelIndexes will show their parents in
# methods such as __repr__
from PyQt5.QtCore import QPointF
show_recursive_relationships = __debug__ # FIXME Should enable via cmdopt
def _qdate__repr__(qdate):
return "<QDate({}-{}-{})>".format(qdate.year(), qdate.month(), qdate.day())
def _qdatetime__repr__(qdatetime):
date, time = qdatetime.date(), qdatetime.time()
return "<QDateTime({}-{}-{} {}:{}:{})>".format(date.year(),
date.month(),
date.day(),
time.hour(),
time.minute(),
time.second())
def _qitemselection__repr__(qitemselection):
indexes = qitemselection.indexes()
return "<QItemSelection({},{})>".format(len(indexes), indexes)
def _qmodelindex__repr__(index):
if index.isValid():
parent = index.parent()
if show_recursive_relationships:
parent_str = "{}".format(parent)
else:
parent_str = "{}".format(type(parent))
return "<QModelIndex({}, {}, parent={}, model={})>".format(index.row(),
index.column(),
parent_str,
index.model())
else:
return "<QModelIndex(<invalid>, model={})>".format(index.model())
def _qpointf__repr__(qpointf):
return "QPointF({}, {})".format(qpointf.x(), qpointf.y())
def install_qt_reprs():
QDate.__repr__ = _qdate__repr__
QDateTime.__repr__ = _qdatetime__repr__
QItemSelection.__repr__ = _qitemselection__repr__
QModelIndex.__repr__ = _qmodelindex__repr__
QPointF.__repr__ = _qpointf__repr__
def install_hacks():
install_qt_reprs()
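# Hypothetical usage sketch (not part of this module): call install_hacks()
# once during application start-up so that debug output shows readable reprs
# for Qt value types, e.g.
#
#     from core.hacks import install_hacks
#     install_hacks()
#     repr(QModelIndex())   # -> "<QModelIndex(<invalid>, model=None)>"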
|
gpl-2.0
| 6,981,152,763,329,449,000 | 34.285714 | 82 | 0.568826 | false | 4.151261 | false | false | false |
F5Networks/f5-common-python
|
f5/bigip/tm/util/test/unit/test_get_dossier.py
|
1
|
1572
|
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import pytest
from f5.bigip import ManagementRoot
from f5.bigip.tm.util.get_dossier import Get_Dossier
@pytest.fixture
def FakeGetDossier():
fake_sys = mock.MagicMock()
fake_get_dossier = Get_Dossier(fake_sys)
return fake_get_dossier
@pytest.fixture
def FakeiControl(fakeicontrolsession):
mr = ManagementRoot('host', 'fake_admin', 'fake_admin')
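    # Swap in a mock REST session so the POST payload can be asserted later
    # without contacting a real BIG-IP.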
mock_session = mock.MagicMock()
mock_session.post.return_value.json.return_value = {}
mr._meta_data['icr_session'] = mock_session
return mr.tm.util.get_dossier
class TestGetDossierCommand(object):
def test_command_get_dossier(self, FakeiControl):
FakeiControl.exec_cmd('run', utilCmdArgs='-b registration-key')
session = FakeiControl._meta_data['bigip']._meta_data['icr_session']
assert session.post.call_args == mock.call(
'https://host:443/mgmt/tm/util/get-dossier/',
json={'utilCmdArgs': '-b registration-key', 'command': 'run'}
)
|
apache-2.0
| 1,810,308,920,897,840,400 | 33.173913 | 76 | 0.713104 | false | 3.462555 | false | false | false |
cirosantilli/python-utils
|
sandbox/elearning/generate_tocs.py
|
1
|
2277
|
import os.path
from xml.dom.minidom import parse
home_dir = os.path.dirname(os.path.dirname(__file__)) #elearning/
html_path = os.path.join(home_dir,'toc.html') #elearning/toc.html
tocs_root_rel_path = 'sidebars'
tocs_path = os.path.join(home_dir,tocs_root_rel_path) #elearning/tocs partial tocs home
class_name = 'nav_tree'
#takes full tree html and generates partial tocs with breadcrumbs in #elearning/tocs dir
def generate_partial_tocs(html_path,tocs_path):
root = parse(html_path)
    remove_whilespace_nodes(root,True) # simpler to process without whitespace-only text nodes
lis = root.getElementsByTagName('li')
for li in lis:
anc = li.childNodes[0]
if(anc.nodeType == anc.ELEMENT_NODE and anc.localName == "a"):
id = anc.attributes["href"].value[1:]
print '<ul class="'+class_name+'">' + li_ascendants(root,li) + li.toxml() + '</ul>'
#lists ascendants list link up to root.
def li_ascendants(root,li):
result = ''
print 'NODE:\n\n' + li.toxml() + '\n\n'
li.childNodes[0]
ul = li.parentNode
while(not ul is root):
li = ul.parentNode
        result += li.childNodes[0].toxml() # should add the whole link
ul = li.parentNode
return result
#to simplify tasks
def remove_whilespace_nodes(node, unlink=False):
"""Removes all of the whitespace-only text decendants of a DOM node.
When creating a DOM from an XML source, XML parsers are required to
consider several conditions when deciding whether to include
whitespace-only text nodes. This function ignores all of those
conditions and removes all whitespace-only text decendants of the
specified node. If the unlink flag is specified, the removed text
nodes are unlinked so that their storage can be reclaimed. If the
specified node is a whitespace-only text node then it is left
unmodified."""
remove_list = []
for child in node.childNodes:
if child.nodeType == child.TEXT_NODE and \
not child.data.strip():
remove_list.append(child)
elif child.hasChildNodes():
remove_whilespace_nodes(child, unlink)
for node in remove_list:
node.parentNode.removeChild(node)
if unlink:
node.unlink()
if __name__ == '__main__':
generate_partial_tocs(html_path,tocs_path)
|
mit
| 458,288,741,566,199,200 | 37.610169 | 88 | 0.684673 | false | 3.481651 | false | false | false |
Autodesk/molecular-design-toolkit
|
moldesign/compute/remote_procedure_calls.py
|
1
|
5016
|
from __future__ import print_function, absolute_import, division
from future.builtins import *
from future import standard_library
standard_library.install_aliases()
# Copyright 2017 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import types
import future.utils
from pyccc import python as bpy
import moldesign as mdt
from moldesign import utils
from . import configuration, run_job
from ..helpers import display_log
class RpcWrapper(object):
""" A wrapper that lets to transparently execute python functions in remote
environments - usually in docker containers.
These wrappers are built to allow a lot of run-time flexibility based on the description
of the package (``self.pkg``) that's being called.
Note:
This ONLY works for pure functions - where you're interested in the
return value only. Side effects - including any object state - will be discarded.
Args:
pkg (mdt.compute.packages.InterfacedPackage): package to run this command with
display (bool): Create a jupyter logging display for the remote job
(default: True in Jupyter notebooks, False otherwise)
jobname (str): Name metadata - defaults to the __name__ of the function
sendsource (bool): if False (default), call this function directly on the remote worker;
if True, send the function's source code (for debugging, mostly)
persist_refs (bool): Persist python object references across the RPC roundtrip
is_imethod (bool): This is an instancemethod
Note: we can't determine this at import-time without going to great lengths ...
- see, e.g., http://stackoverflow.com/questions/2366713/ )
"""
def __init__(self, pkg,
display=True,
jobname=None,
sendsource=False,
is_imethod=False,
persist_refs=False):
self.pkg = pkg
self.display = display
self.sendsource = sendsource
self.jobname = jobname
self.is_imethod = is_imethod
self.persist_refs = persist_refs
def __call__(self, func):
"""
This gets called with the function we wish to wrap
"""
from .compute import get_image_path
assert callable(func)
if self.jobname is None:
self.jobname = func.__name__
assert func.__name__ != 'wrapper' # who wraps the wrappers?
@utils.args_from(func,
wraps=True,
inject_kwargs={'wait': True})
def wrapper(*args, **kwargs):
""" Wraps a python function so that it will be executed remotely using a compute engine
Note:
At runtime, this documentation should be replaced with that of the wrapped function
"""
f = func # keeps a reference to the original function in this closure
wait = kwargs.get('wait', True)
if wait and not self.pkg.force_remote:
return f(*args, **kwargs)
# Bind instance methods to their objects
if self.is_imethod:
f, args = _bind_instance_method(f, args)
# Submit job to remote engine
python_call = bpy.PythonCall(f, *args, **kwargs)
engine = utils.if_not_none(self.pkg.engine, mdt.compute.get_engine())
job = bpy.PythonJob(engine=engine,
image=self.pkg.get_docker_image_path(),
command=python_call,
name=self.jobname,
sendsource=self.sendsource,
interpreter='python', # always run in image's native interpreter
persist_references=self.persist_refs,
submit=False)
return run_job(job, wait=wait, _return_result=True)
wrapper.__name__ = func.__name__
wrapper.__wrapped__ = func
return wrapper
def _bind_instance_method(f, args):
# We can't call this function like normal, because the decorators can't identify
# instance methods. Instead, we'll create another bound copy of the instancemethod (probably
# only need to do this once)
fn_self = args[0]
    if future.utils.PY2:
f = types.MethodType(f, fn_self, fn_self.__class__)
else:
f = types.MethodType(f, fn_self)
args = args[1:]
return f, args
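# A minimal, hypothetical usage sketch (the package and function names are
# illustrative, not part of this module): decorate a pure function so that it
# is shipped to the package's docker image when remote execution is forced.
#
#     @RpcWrapper(pkg=some_interfaced_package, jobname='compute_something')
#     def compute_something(mol):
#         ...
#
# Calling compute_something(mol) then either runs locally (wait=True and no
# forced remote execution) or is wrapped in a pyccc PythonJob and submitted
# through run_job(), with only the return value coming back.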
|
apache-2.0
| -2,045,807,102,273,718,300 | 38.496063 | 99 | 0.61862 | false | 4.454707 | false | false | false |
astagi/chickenfoot
|
test.py
|
1
|
1972
|
from chickenfoot import Chickenfoot
import socket
class TestChickenfootClient():
def setUp(self):
TCP_IP = '192.168.0.6'
TCP_PORT = 5005
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.connect((TCP_IP, TCP_PORT))
def tearDown(self):
self.s.close()
def test_moves(self):
self.left()
self.right()
self.stop_wheel()
self.up()
self.down()
self.stop()
def left(self):
bundle = """
{
"m" : "M1",
"a" : "rl",
"p" : {
"p1name" : "p1",
"p2name": 5
}
}
"""
assert self.__send(bundle)
def right(self):
bundle = """
{
"m" : "M1",
"a" : "rr",
"p" : {
"p1name" : "p1"
}
}
"""
assert self.__send(bundle)
def up(self):
bundle = """
{
"m" : "M2",
"a" : "fw",
"p" : {
"p1name" : "p1",
"p2name" : "p2"
}
}
"""
assert self.__send(bundle)
def down(self):
bundle = """
{
"m" : "M2",
"a" : "rw"
}
"""
assert self.__send(bundle)
def stop(self):
bundle = """
{
"m" : "M1",
"a" : "stop",
"p" : {
"p1name" : "stop"
}
}
"""
assert self.__send(bundle)
def stop_wheel(self):
bundle = """
{
"m" : "M2",
"a" : "stop",
"p" : {
"p1name" : "stop_wheel"
}
}
"""
assert self.__send(bundle)
def __send(self, data):
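        # The payload is newline-terminated before sending; the extra byte in
        # byte_to_send accounts for that trailing "\n".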
byte_to_send = len(data) + 1
byte_sent = self.s.send(data + "\n")
return byte_sent == byte_to_send
|
mit
| -5,306,917,132,645,212,000 | 19.340206 | 66 | 0.345842 | false | 3.591985 | false | false | false |
rossella/neutron
|
quantum/api/extensions.py
|
1
|
22315
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABCMeta
import imp
import os
import routes
import webob.dec
import webob.exc
from quantum.api.v2 import attributes
from quantum.common import constants
from quantum.common import exceptions
import quantum.extensions
from quantum.manager import QuantumManager
from quantum.openstack.common import cfg
from quantum.openstack.common import log as logging
from quantum import wsgi
LOG = logging.getLogger('quantum.api.extensions')
class PluginInterface(object):
__metaclass__ = ABCMeta
@classmethod
def __subclasshook__(cls, klass):
"""
The __subclasshook__ method is a class method
        that will be called every time a class is tested
using issubclass(klass, PluginInterface).
In that case, it will check that every method
marked with the abstractmethod decorator is
provided by the plugin class.
"""
for method in cls.__abstractmethods__:
if any(method in base.__dict__ for base in klass.__mro__):
continue
return NotImplemented
return True
class ExtensionDescriptor(object):
"""Base class that defines the contract for extensions.
Note that you don't have to derive from this class to have a valid
extension; it is purely a convenience.
"""
def get_name(self):
"""The name of the extension.
e.g. 'Fox In Socks'
"""
raise NotImplementedError()
def get_alias(self):
"""The alias for the extension.
e.g. 'FOXNSOX'
"""
raise NotImplementedError()
def get_description(self):
"""Friendly description for the extension.
e.g. 'The Fox In Socks Extension'
"""
raise NotImplementedError()
def get_namespace(self):
"""The XML namespace for the extension.
e.g. 'http://www.fox.in.socks/api/ext/pie/v1.0'
"""
raise NotImplementedError()
def get_updated(self):
"""The timestamp when the extension was last updated.
e.g. '2011-01-22T13:25:27-06:00'
"""
        # NOTE(justinsb): Not sure what the purpose of this is, vs the XML NS
raise NotImplementedError()
def get_resources(self):
"""List of extensions.ResourceExtension extension objects.
Resources define new nouns, and are accessible through URLs.
"""
resources = []
return resources
def get_actions(self):
"""List of extensions.ActionExtension extension objects.
Actions are verbs callable from the API.
"""
actions = []
return actions
def get_request_extensions(self):
"""List of extensions.RequestException extension objects.
Request extensions are used to handle custom request data.
"""
request_exts = []
return request_exts
def get_extended_resources(self, version):
"""retrieve extended resources or attributes for core resources.
Extended attributes are implemented by a core plugin similarly
to the attributes defined in the core, and can appear in
request and response messages. Their names are scoped with the
extension's prefix. The core API version is passed to this
function, which must return a
map[<resource_name>][<attribute_name>][<attribute_property>]
specifying the extended resource attribute properties required
by that API version.
Extension can add resources and their attr definitions too.
The returned map can be integrated into RESOURCE_ATTRIBUTE_MAP.
"""
return {}
def get_plugin_interface(self):
"""
Returns an abstract class which defines contract for the plugin.
        The abstract class should inherit from extensions.PluginInterface.
Methods in this abstract class should be decorated as abstractmethod
"""
return None
class ActionExtensionController(wsgi.Controller):
def __init__(self, application):
self.application = application
self.action_handlers = {}
def add_action(self, action_name, handler):
self.action_handlers[action_name] = handler
def action(self, request, id):
input_dict = self._deserialize(request.body,
request.get_content_type())
for action_name, handler in self.action_handlers.iteritems():
if action_name in input_dict:
return handler(input_dict, request, id)
# no action handler found (bump to downstream application)
response = self.application
return response
class RequestExtensionController(wsgi.Controller):
def __init__(self, application):
self.application = application
self.handlers = []
def add_handler(self, handler):
self.handlers.append(handler)
def process(self, request, *args, **kwargs):
res = request.get_response(self.application)
# currently request handlers are un-ordered
for handler in self.handlers:
response = handler(request, res)
return response
class ExtensionController(wsgi.Controller):
def __init__(self, extension_manager):
self.extension_manager = extension_manager
def _translate(self, ext):
ext_data = {}
ext_data['name'] = ext.get_name()
ext_data['alias'] = ext.get_alias()
ext_data['description'] = ext.get_description()
ext_data['namespace'] = ext.get_namespace()
ext_data['updated'] = ext.get_updated()
ext_data['links'] = [] # TODO(dprince): implement extension links
return ext_data
def index(self, request):
extensions = []
for _alias, ext in self.extension_manager.extensions.iteritems():
extensions.append(self._translate(ext))
return dict(extensions=extensions)
def show(self, request, id):
# NOTE(dprince): the extensions alias is used as the 'id' for show
ext = self.extension_manager.extensions.get(id, None)
if not ext:
raise webob.exc.HTTPNotFound(
_("Extension with alias %s does not exist") % id)
return dict(extension=self._translate(ext))
def delete(self, request, id):
raise webob.exc.HTTPNotFound()
def create(self, request):
raise webob.exc.HTTPNotFound()
class ExtensionMiddleware(wsgi.Middleware):
"""Extensions middleware for WSGI."""
def __init__(self, application,
ext_mgr=None):
self.ext_mgr = (ext_mgr
or ExtensionManager(
get_extensions_path()))
mapper = routes.Mapper()
# extended resources
for resource in self.ext_mgr.get_resources():
path_prefix = resource.path_prefix
if resource.parent:
path_prefix = (resource.path_prefix +
"/%s/{%s_id}" %
(resource.parent["collection_name"],
resource.parent["member_name"]))
LOG.debug(_('Extended resource: %s'),
resource.collection)
for action, method in resource.collection_actions.iteritems():
conditions = dict(method=[method])
path = "/%s/%s" % (resource.collection, action)
with mapper.submapper(controller=resource.controller,
action=action,
path_prefix=path_prefix,
conditions=conditions) as submap:
submap.connect(path)
submap.connect("%s.:(format)" % path)
mapper.resource(resource.collection, resource.collection,
controller=resource.controller,
member=resource.member_actions,
parent_resource=resource.parent,
path_prefix=path_prefix)
# extended actions
action_controllers = self._action_ext_controllers(application,
self.ext_mgr, mapper)
for action in self.ext_mgr.get_actions():
LOG.debug(_('Extended action: %s'), action.action_name)
controller = action_controllers[action.collection]
controller.add_action(action.action_name, action.handler)
# extended requests
req_controllers = self._request_ext_controllers(application,
self.ext_mgr, mapper)
for request_ext in self.ext_mgr.get_request_extensions():
LOG.debug(_('Extended request: %s'), request_ext.key)
controller = req_controllers[request_ext.key]
controller.add_handler(request_ext.handler)
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
mapper)
super(ExtensionMiddleware, self).__init__(application)
@classmethod
def factory(cls, global_config, **local_config):
"""Paste factory."""
def _factory(app):
return cls(app, global_config, **local_config)
return _factory
def _action_ext_controllers(self, application, ext_mgr, mapper):
"""Return a dict of ActionExtensionController-s by collection."""
action_controllers = {}
for action in ext_mgr.get_actions():
if action.collection not in action_controllers.keys():
controller = ActionExtensionController(application)
mapper.connect("/%s/:(id)/action.:(format)" %
action.collection,
action='action',
controller=controller,
conditions=dict(method=['POST']))
mapper.connect("/%s/:(id)/action" % action.collection,
action='action',
controller=controller,
conditions=dict(method=['POST']))
action_controllers[action.collection] = controller
return action_controllers
def _request_ext_controllers(self, application, ext_mgr, mapper):
"""Returns a dict of RequestExtensionController-s by collection."""
request_ext_controllers = {}
for req_ext in ext_mgr.get_request_extensions():
if req_ext.key not in request_ext_controllers.keys():
controller = RequestExtensionController(application)
mapper.connect(req_ext.url_route + '.:(format)',
action='process',
controller=controller,
conditions=req_ext.conditions)
mapper.connect(req_ext.url_route,
action='process',
controller=controller,
conditions=req_ext.conditions)
request_ext_controllers[req_ext.key] = controller
return request_ext_controllers
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
"""Route the incoming request with router."""
req.environ['extended.app'] = self.application
return self._router
@staticmethod
@webob.dec.wsgify(RequestClass=wsgi.Request)
def _dispatch(req):
"""Dispatch the request.
Returns the routed WSGI app's response or defers to the extended
application.
"""
match = req.environ['wsgiorg.routing_args'][1]
if not match:
return req.environ['extended.app']
app = match['controller']
return app
def plugin_aware_extension_middleware_factory(global_config, **local_config):
"""Paste factory."""
def _factory(app):
ext_mgr = PluginAwareExtensionManager.get_instance()
return ExtensionMiddleware(app, ext_mgr=ext_mgr)
return _factory
class ExtensionManager(object):
"""Load extensions from the configured extension path.
See tests/unit/extensions/foxinsocks.py for an
example extension implementation.
"""
def __init__(self, path):
LOG.info(_('Initializing extension manager.'))
self.path = path
self.extensions = {}
self._load_all_extensions()
def get_resources(self):
"""Returns a list of ResourceExtension objects."""
resources = []
resources.append(ResourceExtension('extensions',
ExtensionController(self)))
for ext in self.extensions.itervalues():
try:
resources.extend(ext.get_resources())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have resource
# extensions
pass
return resources
def get_actions(self):
"""Returns a list of ActionExtension objects."""
actions = []
for ext in self.extensions.itervalues():
try:
actions.extend(ext.get_actions())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have action
# extensions
pass
return actions
def get_request_extensions(self):
"""Returns a list of RequestExtension objects."""
request_exts = []
for ext in self.extensions.itervalues():
try:
request_exts.extend(ext.get_request_extensions())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have request
# extensions
pass
return request_exts
def extend_resources(self, version, attr_map):
"""Extend resources with additional resources or attributes.
:param: attr_map, the existing mapping from resource name to
attrs definition.
After this function, we will extend the attr_map if an extension
wants to extend this map.
"""
for ext in self.extensions.itervalues():
if not hasattr(ext, 'get_extended_resources'):
continue
try:
extended_attrs = ext.get_extended_resources(version)
for resource, resource_attrs in extended_attrs.iteritems():
if attr_map.get(resource, None):
attr_map[resource].update(resource_attrs)
else:
attr_map[resource] = resource_attrs
if extended_attrs:
attributes.EXT_NSES[ext.get_alias()] = ext.get_namespace()
except AttributeError:
LOG.exception(_("Error fetching extended attributes for "
"extension '%s'"), ext.get_name())
def _check_extension(self, extension):
"""Checks for required methods in extension objects."""
try:
LOG.debug(_('Ext name: %s'), extension.get_name())
LOG.debug(_('Ext alias: %s'), extension.get_alias())
LOG.debug(_('Ext description: %s'), extension.get_description())
LOG.debug(_('Ext namespace: %s'), extension.get_namespace())
LOG.debug(_('Ext updated: %s'), extension.get_updated())
except AttributeError as ex:
LOG.exception(_("Exception loading extension: %s"), unicode(ex))
return False
if hasattr(extension, 'check_env'):
try:
extension.check_env()
except exceptions.InvalidExtenstionEnv as ex:
LOG.warn(_("Exception loading extension: %s"), unicode(ex))
return False
return True
def _load_all_extensions(self):
"""Load extensions from the configured path.
Load extensions from the configured path. The extension name is
constructed from the module_name. If your extension module was named
widgets.py the extension class within that module should be
'Widgets'.
See tests/unit/extensions/foxinsocks.py for an example
extension implementation.
"""
for path in self.path.split(':'):
if os.path.exists(path):
self._load_all_extensions_from_path(path)
else:
LOG.error(_("Extension path '%s' doesn't exist!"), path)
def _load_all_extensions_from_path(self, path):
for f in os.listdir(path):
try:
LOG.info(_('Loading extension file: %s'), f)
mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
ext_path = os.path.join(path, f)
if file_ext.lower() == '.py' and not mod_name.startswith('_'):
mod = imp.load_source(mod_name, ext_path)
ext_name = mod_name[0].upper() + mod_name[1:]
new_ext_class = getattr(mod, ext_name, None)
if not new_ext_class:
LOG.warn(_('Did not find expected name '
'"%(ext_name)s" in %(file)s'),
{'ext_name': ext_name,
'file': ext_path})
continue
new_ext = new_ext_class()
self.add_extension(new_ext)
except Exception as exception:
LOG.warn(_("Extension file %(f)s wasn't loaded due to "
"%(exception)s"), locals())
def add_extension(self, ext):
# Do nothing if the extension doesn't check out
if not self._check_extension(ext):
return
alias = ext.get_alias()
LOG.info(_('Loaded extension: %s'), alias)
if alias in self.extensions:
raise exceptions.Error(_("Found duplicate extension: %s") %
alias)
self.extensions[alias] = ext
class PluginAwareExtensionManager(ExtensionManager):
_instance = None
def __init__(self, path, plugins):
self.plugins = plugins
super(PluginAwareExtensionManager, self).__init__(path)
def _check_extension(self, extension):
"""Checks if any of plugins supports extension and implements the
extension contract."""
extension_is_valid = super(PluginAwareExtensionManager,
self)._check_extension(extension)
return (extension_is_valid and
self._plugins_support(extension) and
self._plugins_implement_interface(extension))
def _plugins_support(self, extension):
alias = extension.get_alias()
supports_extension = any((hasattr(plugin,
"supported_extension_aliases") and
alias in plugin.supported_extension_aliases)
for plugin in self.plugins.values())
if not supports_extension:
LOG.warn(_("Extension %s not supported by any of loaded plugins"),
alias)
return supports_extension
def _plugins_implement_interface(self, extension):
if(not hasattr(extension, "get_plugin_interface") or
extension.get_plugin_interface() is None):
return True
for plugin in self.plugins.values():
if isinstance(plugin, extension.get_plugin_interface()):
return True
LOG.warn(_("Loaded plugins do not implement extension %s interface"),
extension.get_alias())
return False
@classmethod
def get_instance(cls):
if cls._instance is None:
cls._instance = cls(get_extensions_path(),
QuantumManager.get_service_plugins())
return cls._instance
class RequestExtension(object):
"""Extend requests and responses of core Quantum OpenStack API controllers.
Provide a way to add data to responses and handle custom request data
that is sent to core Quantum OpenStack API controllers.
"""
def __init__(self, method, url_route, handler):
self.url_route = url_route
self.handler = handler
self.conditions = dict(method=[method])
self.key = "%s-%s" % (method, url_route)
class ActionExtension(object):
"""Add custom actions to core Quantum OpenStack API controllers."""
def __init__(self, collection, action_name, handler):
self.collection = collection
self.action_name = action_name
self.handler = handler
class ResourceExtension(object):
"""Add top level resources to the OpenStack API in Quantum."""
def __init__(self, collection, controller, parent=None, path_prefix="",
collection_actions={}, member_actions={}, attr_map={}):
self.collection = collection
self.controller = controller
self.parent = parent
self.collection_actions = collection_actions
self.member_actions = member_actions
self.path_prefix = path_prefix
self.attr_map = attr_map
# Returns the extention paths from a config entry and the __path__
# of quantum.extensions
def get_extensions_path():
paths = ':'.join(quantum.extensions.__path__)
if cfg.CONF.api_extensions_path:
paths = ':'.join([cfg.CONF.api_extensions_path, paths])
return paths
|
apache-2.0
| 665,210,720,194,459,000 | 35.581967 | 79 | 0.585122 | false | 4.805125 | false | false | false |
dbarenas/django-scheduler
|
agenda_template/agenda_template/schedule/periods.py
|
1
|
14782
|
from __future__ import unicode_literals
from builtins import range
from builtins import object
import pytz
import datetime
import calendar as standardlib_calendar
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
from django.template.defaultfilters import date as date_filter
from django.utils.dates import WEEKDAYS, WEEKDAYS_ABBR
from django.utils.translation import ugettext
from schedule.conf.settings import SHOW_CANCELLED_OCCURRENCES
from schedule.models import Occurrence
from django.utils import timezone
weekday_names = []
weekday_abbrs = []
if settings.FIRST_DAY_OF_WEEK == 1:
# The calendar week starts on Monday
for i in range(7):
weekday_names.append(WEEKDAYS[i])
weekday_abbrs.append(WEEKDAYS_ABBR[i])
else:
# The calendar week starts on Sunday, not Monday
weekday_names.append(WEEKDAYS[6])
weekday_abbrs.append(WEEKDAYS_ABBR[6])
for i in range(6):
weekday_names.append(WEEKDAYS[i])
weekday_abbrs.append(WEEKDAYS_ABBR[i])
class Period(object):
"""
This class represents a period of time. It can return a set of occurrences
based on its events, and its time period (start and end).
"""
def __init__(self, events, start, end, parent_persisted_occurrences=None,
occurrence_pool=None, tzinfo=pytz.utc):
self.utc_start = self._normalize_timezone_to_utc(start, tzinfo)
self.utc_end = self._normalize_timezone_to_utc(end, tzinfo)
self.events = events
self.tzinfo = self._get_tzinfo(tzinfo)
self.occurrence_pool = occurrence_pool
if parent_persisted_occurrences is not None:
self._persisted_occurrences = parent_persisted_occurrences
def _normalize_timezone_to_utc(self, point_in_time, tzinfo):
if point_in_time.tzinfo is not None:
return point_in_time.astimezone(pytz.utc)
if tzinfo is not None:
return tzinfo.localize(point_in_time).astimezone(pytz.utc)
if settings.USE_TZ:
return pytz.utc.localize(point_in_time)
else:
if timezone.is_aware(point_in_time):
return timezone.make_naive(point_in_time, pytz.utc)
else:
return point_in_time
def __eq__(self, period):
return self.utc_start == period.utc_start and self.utc_end == period.utc_end and self.events == period.events
def __ne__(self, period):
return self.utc_start != period.utc_start or self.utc_end != period.utc_end or self.events != period.events
def _get_tzinfo(self, tzinfo):
return tzinfo if settings.USE_TZ else None
def _get_sorted_occurrences(self):
occurrences = []
if hasattr(self, "occurrence_pool") and self.occurrence_pool is not None:
for occurrence in self.occurrence_pool:
if occurrence.start <= self.utc_end and occurrence.end >= self.utc_start:
occurrences.append(occurrence)
return occurrences
for event in self.events:
event_occurrences = event.get_occurrences(self.start, self.end)
occurrences += event_occurrences
return sorted(occurrences)
def cached_get_sorted_occurrences(self):
if hasattr(self, '_occurrences'):
return self._occurrences
occs = self._get_sorted_occurrences()
self._occurrences = occs
return occs
occurrences = property(cached_get_sorted_occurrences)
def get_persisted_occurrences(self):
        if hasattr(self, '_persisted_occurrences'):
return self._persisted_occurrences
else:
self._persisted_occurrences = Occurrence.objects.filter(event__in=self.events)
return self._persisted_occurrences
def classify_occurrence(self, occurrence):
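        # Return codes: 1 = starts and ends inside this period, 0 = only starts
        # inside, 3 = only ends inside, 2 = merely continues through the period.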
if occurrence.cancelled and not SHOW_CANCELLED_OCCURRENCES:
return
if occurrence.start > self.end or occurrence.end < self.start:
return None
started = False
ended = False
if self.utc_start <= occurrence.start < self.utc_end:
started = True
if self.utc_start <= occurrence.end < self.utc_end:
ended = True
if started and ended:
return {'occurrence': occurrence, 'class': 1}
elif started:
return {'occurrence': occurrence, 'class': 0}
elif ended:
return {'occurrence': occurrence, 'class': 3}
# it existed during this period but it didn't begin or end within it
# so it must have just continued
return {'occurrence': occurrence, 'class': 2}
def get_occurrence_partials(self):
occurrence_dicts = []
for occurrence in self.occurrences:
occurrence = self.classify_occurrence(occurrence)
if occurrence:
occurrence_dicts.append(occurrence)
return occurrence_dicts
def get_occurrences(self):
return self.occurrences
def has_occurrences(self):
return any(self.classify_occurrence(o) for o in self.occurrences)
def get_time_slot(self, start, end):
if start >= self.start and end <= self.end:
return Period(self.events, start, end)
return None
def create_sub_period(self, cls, start=None, tzinfo=None):
if tzinfo is None:
tzinfo = self.tzinfo
start = start or self.start
return cls(self.events, start, self.get_persisted_occurrences(), self.occurrences, tzinfo)
def get_periods(self, cls, tzinfo=None):
if tzinfo is None:
tzinfo = self.tzinfo
period = self.create_sub_period(cls)
while period.start < self.end:
yield self.create_sub_period(cls, period.start, tzinfo)
period = next(period)
@property
def start(self):
if self.tzinfo is not None:
return self.utc_start.astimezone(self.tzinfo)
return self.utc_start.replace(tzinfo=None)
@property
def end(self):
if self.tzinfo is not None:
return self.utc_end.astimezone(self.tzinfo)
return self.utc_end.replace(tzinfo=None)
@python_2_unicode_compatible
class Year(Period):
def __init__(self, events, date=None, parent_persisted_occurrences=None, tzinfo=pytz.utc):
self.tzinfo = self._get_tzinfo(tzinfo)
if date is None:
date = timezone.now()
start, end = self._get_year_range(date)
super(Year, self).__init__(events, start, end, parent_persisted_occurrences, tzinfo=tzinfo)
def get_months(self):
return self.get_periods(Month)
def next_year(self):
return Year(self.events, self.end, tzinfo=self.tzinfo)
__next__ = next_year
def prev_year(self):
start = datetime.datetime(self.start.year - 1, self.start.month, self.start.day)
return Year(self.events, start, tzinfo=self.tzinfo)
prev = prev_year
def _get_year_range(self, year):
#If tzinfo is not none get the local start of the year and convert it to utc.
naive_start = datetime.datetime(year.year, datetime.datetime.min.month, datetime.datetime.min.day)
naive_end = datetime.datetime(year.year + 1, datetime.datetime.min.month, datetime.datetime.min.day)
start = naive_start
end = naive_end
if self.tzinfo is not None:
local_start = self.tzinfo.localize(naive_start)
local_end = self.tzinfo.localize(naive_end)
start = local_start.astimezone(pytz.utc)
end = local_end.astimezone(pytz.utc)
return start, end
def __str__(self):
        return str(self.start.year)
@python_2_unicode_compatible
class Month(Period):
"""
The month period has functions for retrieving the week periods within this period
and day periods within the date.
"""
def __init__(self, events, date=None, parent_persisted_occurrences=None,
occurrence_pool=None, tzinfo=pytz.utc):
self.tzinfo = self._get_tzinfo(tzinfo)
if date is None:
date = timezone.now()
start, end = self._get_month_range(date)
super(Month, self).__init__(events, start, end,
parent_persisted_occurrences, occurrence_pool, tzinfo=tzinfo)
def get_weeks(self):
return self.get_periods(Week)
def get_days(self):
return self.get_periods(Day)
def get_day(self, daynumber):
date = self.start
if daynumber > 1:
date += datetime.timedelta(days=daynumber - 1)
return self.create_sub_period(Day, date)
def next_month(self):
return Month(self.events, self.end, tzinfo=self.tzinfo)
__next__ = next_month
def prev_month(self):
start = (self.start - datetime.timedelta(days=1)).replace(day=1, tzinfo=self.tzinfo)
return Month(self.events, start, tzinfo=self.tzinfo)
prev = prev_month
def current_year(self):
return Year(self.events, self.start, tzinfo=self.tzinfo)
def prev_year(self):
start = datetime.datetime.min.replace(year=self.start.year - 1, tzinfo=self.tzinfo)
return Year(self.events, start, tzinfo=self.tzinfo)
def next_year(self):
start = datetime.datetime.min.replace(year=self.start.year + 1, tzinfo=self.tzinfo)
return Year(self.events, start, tzinfo=self.tzinfo)
def _get_month_range(self, month):
year = month.year
month = month.month
#If tzinfo is not none get the local start of the month and convert it to utc.
naive_start = datetime.datetime.min.replace(year=year, month=month)
if month == 12:
naive_end = datetime.datetime.min.replace(month=1, year=year + 1, day=1)
else:
naive_end = datetime.datetime.min.replace(month=month + 1, year=year, day=1)
start = naive_start
end = naive_end
if self.tzinfo is not None:
local_start = self.tzinfo.localize(naive_start)
local_end = self.tzinfo.localize(naive_end)
start = local_start.astimezone(pytz.utc)
end = local_end.astimezone(pytz.utc)
return start, end
def __str__(self):
return self.name()
def name(self):
return standardlib_calendar.month_name[self.start.month]
def year(self):
return self.start.year
@python_2_unicode_compatible
class Week(Period):
"""
The Week period that has functions for retrieving Day periods within it
"""
def __init__(self, events, date=None, parent_persisted_occurrences=None,
occurrence_pool=None, tzinfo=pytz.utc):
self.tzinfo = self._get_tzinfo(tzinfo)
if date is None:
date = timezone.now()
start, end = self._get_week_range(date)
super(Week, self).__init__(events, start, end,
parent_persisted_occurrences, occurrence_pool, tzinfo=tzinfo)
def prev_week(self):
return Week(self.events, self.start - datetime.timedelta(days=7), tzinfo=self.tzinfo)
prev = prev_week
def next_week(self):
return Week(self.events, self.end, tzinfo=self.tzinfo)
__next__ = next_week
def current_month(self):
return Month(self.events, self.start, tzinfo=self.tzinfo)
def current_year(self):
return Year(self.events, self.start, tzinfo=self.tzinfo)
def get_days(self):
return self.get_periods(Day)
def _get_week_range(self, week):
if isinstance(week, datetime.datetime):
week = week.date()
# Adjust the start datetime to midnight of the week datetime
naive_start = datetime.datetime.combine(week, datetime.time.min)
# Adjust the start datetime to Monday or Sunday of the current week
if settings.FIRST_DAY_OF_WEEK == 1:
# The week begins on Monday
sub_days = naive_start.isoweekday() - 1
else:
# The week begins on Sunday
sub_days = naive_start.isoweekday()
if sub_days == 7:
sub_days = 0
if sub_days > 0:
naive_start = naive_start - datetime.timedelta(days=sub_days)
naive_end = naive_start + datetime.timedelta(days=7)
if self.tzinfo is not None:
local_start = self.tzinfo.localize(naive_start)
local_end = self.tzinfo.localize(naive_end)
start = local_start.astimezone(pytz.utc)
end = local_end.astimezone(pytz.utc)
else:
start = naive_start
end = naive_end
return start, end
def __str__(self):
date_format = 'l, %s' % settings.DATE_FORMAT
return ugettext('Week: %(start)s-%(end)s') % {
'start': date_filter(self.start, date_format),
'end': date_filter(self.end, date_format),
}
@python_2_unicode_compatible
class Day(Period):
def __init__(self, events, date=None, parent_persisted_occurrences=None,
occurrence_pool=None, tzinfo=pytz.utc):
self.tzinfo = self._get_tzinfo(tzinfo)
if date is None:
date = timezone.now()
start, end = self._get_day_range(date)
super(Day, self).__init__(events, start, end,
parent_persisted_occurrences, occurrence_pool, tzinfo=tzinfo)
def _get_day_range(self, date):
if isinstance(date, datetime.datetime):
date = date.date()
naive_start = datetime.datetime.combine(date, datetime.time.min)
naive_end = datetime.datetime.combine(date + datetime.timedelta(days=1), datetime.time.min)
if self.tzinfo is not None:
local_start = self.tzinfo.localize(naive_start)
local_end = self.tzinfo.localize(naive_end)
start = local_start.astimezone(pytz.utc)
end = local_end.astimezone(pytz.utc)
else:
start = naive_start
end = naive_end
return start, end
def __str__(self):
date_format = 'l, %s' % settings.DATE_FORMAT
return ugettext('Day: %(start)s-%(end)s') % {
'start': date_filter(self.start, date_format),
'end': date_filter(self.end, date_format),
}
def prev_day(self):
return Day(self.events, self.start - datetime.timedelta(days=1), tzinfo=self.tzinfo)
prev = prev_day
def next_day(self):
return Day(self.events, self.end, tzinfo=self.tzinfo)
__next__ = next_day
def current_year(self):
return Year(self.events, self.start, tzinfo=self.tzinfo)
def current_month(self):
return Month(self.events, self.start, tzinfo=self.tzinfo)
def current_week(self):
return Week(self.events, self.start, tzinfo=self.tzinfo)
|
gpl-2.0
| 3,281,308,981,689,938,000 | 35.862843 | 117 | 0.625085 | false | 3.854498 | false | false | false |
arruda/presente_14
|
presente_14/chat_parser/html_parser.py
|
1
|
4871
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from bs4 import BeautifulSoup
from .chat_objects import *
def get_emails_html(path_to_html='emails.html'):
"returns the html from the emails file"
html = None
with open(path_to_html, 'r') as emails_file:
html = emails_file.read()
return html
def get_h2s_positions(html):
"return a list of all the index of H2 in the given html"
import re
starts = [match.start() for match in re.finditer(re.escape('<h2>'), html)]
return starts
def get_h3s_positions(html):
"return a list of all the index of H3 in the given html"
import re
starts = [match.start() for match in re.finditer(re.escape('<h3>'), html)]
return starts
def validate_conversation_group_html(html):
parsed_html = BeautifulSoup(html)
h2 = parsed_html.find('h2')
return 'Bate-papo' in h2.get_text()
def get_conversations_groups_html(html):
"returns a list of string that represent each conversations group of this html"
h2s_indexes = get_h2s_positions(html)
conversations_groups = []
last_h2_index = h2s_indexes[0]
for h2_index in h2s_indexes[1:]:
conversation_group_html = html[last_h2_index:h2_index]
if(validate_conversation_group_html(conversation_group_html)):
conversations_groups.append(conversation_group_html)
last_h2_index = h2_index
#: add the last one
conversation_group_html = html[last_h2_index:]
if(validate_conversation_group_html(conversation_group_html)):
conversations_groups.append(conversation_group_html)
return conversations_groups
def get_conversations_html(html):
"returns a list of string that represent each conversation of this html"
h3s_indexes = get_h3s_positions(html)
conversations = []
last_h3_index = h3s_indexes[0]
if len(h3s_indexes) > 1:
for h3_index in h3s_indexes[1:]:
conversation_html = html[last_h3_index:h3_index]
conversations.append(conversation_html)
last_h3_index = h3_index
#: add the last one
conversation_html = html[last_h3_index:]
conversations.append(conversation_html)
else:
conversation_html = html[last_h3_index:]
conversations.append(conversation_html)
return conversations
def get_messages(conversation_html):
"return the list of messages in a html"
parsed_html = BeautifulSoup(conversation_html)
msgs = []
span = parsed_html.find('span')
while span is not None:
msg, next_span = message_and_next_span_from_html(span)
msgs.append(msg)
span = next_span
return msgs
def message_and_next_span_from_html(span_html):
"return the Message object for this html and also the next span html"
author_span = span_html.findNext('span', attrs={'style': 'font-weight:bold'})
author = author_span.get_text().replace('eu', 'felipe').capitalize()
msg = span_html.get_text()
msg = remove_author_from_message(msg)
return Message(author, msg), author_span.findNext('span')
def remove_author_from_message(message_txt):
"removes the author from the message text"
first_ddot = message_txt.find(':')
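    # +2 skips the colon itself plus the space that follows the author's name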
message_txt = message_txt[first_ddot+2:]
return message_txt
def get_conversation_date(conversation_html):
"returns the date of the conversation html"
parsed_html = BeautifulSoup(conversation_html)
date = parsed_html.findAll('p')[1].get_text()
return date
def get_conversation_group(conversations_group_html):
"returns the conversation group of the given html"
conversation_list = []
for conversation_html in get_conversations_html(conversations_group_html):
msgs = get_messages(conversation_html)
date = get_conversation_date(conversation_html)
# if "Mar 21 2012 23:23:21" in date:
# import pdb;pdb.set_trace()
# print "a"
# 2012-03-21 23:23:21
conversation = Conversation(date, msgs)
conversation_list.append(conversation)
conversation_group = ConversationGroup(conversation_list)
return conversation_group
def perc_done(done, total):
"the percentage done of all the conversations groups"
print "%.f" % (done / total * 100), "%"
def parse_html(path_to_html):
"parse the emails html and return them in python objects"
html = get_emails_html(path_to_html)
conversations_group_list_html = get_conversations_groups_html(html)
total = len(conversations_group_list_html)
done = 0.0
conversations_group_list = []
for conversations_group_html in conversations_group_list_html:
perc_done(done, total)
conversations_group_list.append(get_conversation_group(conversations_group_html))
done = done + 1
perc_done(done, total)
return conversations_group_list
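# Hypothetical usage sketch (the input path is illustrative; see parse_html's
# docstring above):
#
#     groups = parse_html('emails.html')
#     print "parsed %d conversation groups" % len(groups)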
|
mit
| -3,933,352,032,407,904,000 | 30.425806 | 89 | 0.674605 | false | 3.408677 | false | false | false |
GeoMop/GeoMop
|
src/gm_base/model_data/export_con.py
|
1
|
4070
|
"""Module for exporting the data structure to .con format.
.. codeauthor:: Tomas Krizek <[email protected]>
"""
INDENTATION = ' '
class Exporter:
"""Exporter from data structure to con files."""
def __init__(self):
"""Initialize the class."""
self.lines = ['']
self.indent_level = 0
def export_con(self, root):
"""Create .con text from a root data node.
:param DataNode root: the root of the data structure
:return: text representation of the structure in .con format
:rtype: str
"""
self.lines = ['']
self.indent_level = 0
self._create_node(root)
return '\n'.join(self.lines)
def _print_line(self, text):
"""Append the text as indented line to the buffer.
:param str text: a line of text without the EOL symbol
"""
self.lines.append(self.indent_level * INDENTATION + text)
def _print(self, text):
"""Append the text to the last line."""
self.lines[-1] = self.lines[-1] + text
def _print_new_line(self, indent_change=0):
"""Append new line with the appropriate indentation.
:param int indent_change: +1, 0 or -1 to increase, keep or decrease indentation
"""
self.indent_level += indent_change
self.lines.append(self.indent_level * INDENTATION)
def _create_mapping_node(self, node):
"""Create a mapping node."""
self._print('{')
self._print_new_line(1)
# check for type
if node.type is not None:
self._print('TYPE = "{type}",'.format(type=node.type.value))
self._print_new_line()
# print all keys
for child in node.children:
self._print(child.key.value + ' = ')
self._create_node(child)
self._print(',')
self._print_new_line()
self.lines.pop() # remove last (extra) line
self.lines[-1] = self.lines[-1][:-1] # remove , from end of line
self._print_new_line(-1)
self._print('}')
def _create_node(self, node):
"""Create a node based on its type.
:param DataNode node: node to be create in text
"""
if node.ref is not None:
path = node.ref.absolute_path
self._create_reference(path)
else:
if node.implementation == node.Implementation.mapping:
self._create_mapping_node(node)
elif node.implementation == node.Implementation.scalar:
self._create_scalar_node(node)
elif node.implementation == node.Implementation.sequence:
self._create_sequence_node(node)
def _create_scalar_node(self, node):
"""Create a text representation of scalar node.
:param DataNode node: node
"""
if isinstance(node.value, bool):
self._print('true' if node.value else 'false')
elif isinstance(node.value, int):
self._print(str(node.value))
elif isinstance(node.value, float):
self._print(str(node.value))
else:
self._print('"' + node.value + '"')
def _create_sequence_node(self, node):
"""Create a text representation of sequence node.
:param DataNode node: node
"""
self._print('[')
self._print_new_line(1)
# print all keys
for child in node.children:
self._create_node(child)
self._print(',')
self._print_new_line()
self.lines.pop() # remove last (extra) line
self.lines[-1] = self.lines[-1][:-1] # remove , from end of line
self._print_new_line(-1)
self._print(']')
def _create_reference(self, path):
"""Create a reference node with the given absolute path."""
self._print('{')
self._print_new_line(1)
self._print('REF = "{ref}"'.format(ref=path))
self._print_new_line(-1)
self._print('}')
_exporter = Exporter()
export_con = _exporter.export_con
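# Illustrative sketch of what export_con() emits (not executed here, since building a
# DataNode tree requires the loader defined elsewhere in GeoMop). For a mapping root
# with a scalar key and a sequence child, the generated text looks roughly like:
#
#   {
#     mesh = "input/mesh.msh",
#     output_streams = [
#       { REF = "/system/output_stream" },
#       true
#     ]
#   }
#
# The key names above are made up; the braces, brackets, REF records and the
# indentation (controlled by INDENTATION) come from _create_mapping_node,
# _create_sequence_node and _create_reference.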
|
gpl-3.0
| -7,520,589,368,520,169,000 | 29.601504 | 87 | 0.561425 | false | 3.951456 | false | false | false |
emencia/emencia-django-forum
|
forum/settings.py
|
1
|
2607
|
"""
Default Forum settings to import/define in your project settings
"""
# Categories pagination in 'Category index' (=forum index) view
FORUM_CATEGORY_INDEX_PAGINATE = 6
# Threads pagination in 'Last threads' view
FORUM_LAST_THREAD_PAGINATE = 15
# Threads pagination in 'Category detail' view
FORUM_CATEGORY_THREAD_PAGINATE = 15
# Messages pagination in 'Thread detail' view
FORUM_THREAD_DETAIL_PAGINATE = 10
# If True message owner can edit its text, else only admin/moderate
FORUM_OWNER_MESSAGE_CAN_EDIT = True
# If True threadwatch checkbox is checked in thread create and post create forms
FORUM_DEFAULT_THREADWATCH_CHECKBOX = False
# Receiver function for signal when a new Post is created
FORUM_NEW_POST_SIGNAL = 'forum.signals.new_message_posted_receiver'
# Specific email sender address, if None. Use in the default new Post signal receiver
FORUM_EMAIL_SENDER = None
# Add new specific "rstview" parser settings for the Forum app. If you have other apps
# that define parser settings, this can lead to overwrite problems. In that case, just
# define all parser settings in 'RSTVIEW_PARSER_FILTER_SETTINGS' in the same settings
# file.
# WARNING: this should be removed; it must not be a default setting coming from forum.
# Add it as a note in the markup install doc instead.
RSTVIEW_PARSER_FILTER_SETTINGS = {
'forum':{
'initial_header_level': 5,
'file_insertion_enabled': False,
'raw_enabled': False,
'footnote_references': 'superscript',
'doctitle_xform': False,
},
}
#
# Optionnal text markup settings
#
# Field helper for text in forms
FORUM_TEXT_FIELD_HELPER_PATH = None # Default, just a CharField
#FORUM_TEXT_FIELD_HELPER_PATH = "forum.markup.get_text_field" # Use DjangoCodeMirror
# Validator helper for Post.text in forms
FORUM_TEXT_VALIDATOR_HELPER_PATH = None # Default, no markup validation
#FORUM_TEXT_VALIDATOR_HELPER_PATH = "forum.markup.clean_restructuredtext" # Validation for RST syntax (with Rstview)
# Text markup renderer
FORUM_TEXT_MARKUP_RENDER_TEMPLATE = None # Default, just a CharField
#FORUM_TEXT_MARKUP_RENDER_TEMPLATE = "forum/markup/_text_markup_render.html" # Use Rstview renderer
# Template to init some Javascript for text in forms
FORUM_TEXT_FIELD_JS_TEMPLATE = None # Default, no JS template
#FORUM_TEXT_FIELD_JS_TEMPLATE = "forum/markup/_text_field_djangocodemirror_js.html" # Use DjangoCodeMirror
# Template to display author infos in thread's post list
FORUM_AUTHOR_VCARD_TEMPLATE = None # Default, only display the author username
#FORUM_AUTHOR_VCARD_TEMPLATE = "forum/author/_vcard.html" # Use Gravatar
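# Usage sketch (an assumption, not shipped with the app): import these defaults into
# your project settings module and override what you need, e.g.
#
#   from forum.settings import *             # noqa: F401,F403
#   FORUM_LAST_THREAD_PAGINATE = 30           # show more threads per page
#   FORUM_TEXT_FIELD_HELPER_PATH = "forum.markup.get_text_field"  # enable DjangoCodeMirror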
|
mit
| -1,560,000,287,349,011,200 | 39.734375 | 116 | 0.754507 | false | 3.522973 | false | false | false |
davy39/eric
|
Graphics/Ui_UMLSceneSizeDialog.py
|
1
|
2749
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './Graphics/UMLSceneSizeDialog.ui'
#
# Created: Tue Nov 18 17:53:58 2014
# by: PyQt5 UI code generator 5.3.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_UMLSceneSizeDialog(object):
def setupUi(self, UMLSceneSizeDialog):
UMLSceneSizeDialog.setObjectName("UMLSceneSizeDialog")
UMLSceneSizeDialog.resize(314, 103)
UMLSceneSizeDialog.setSizeGripEnabled(True)
self.gridlayout = QtWidgets.QGridLayout(UMLSceneSizeDialog)
self.gridlayout.setObjectName("gridlayout")
self.buttonBox = QtWidgets.QDialogButtonBox(UMLSceneSizeDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridlayout.addWidget(self.buttonBox, 2, 0, 1, 2)
self.textLabel2 = QtWidgets.QLabel(UMLSceneSizeDialog)
self.textLabel2.setObjectName("textLabel2")
self.gridlayout.addWidget(self.textLabel2, 1, 0, 1, 1)
self.textLabel1 = QtWidgets.QLabel(UMLSceneSizeDialog)
self.textLabel1.setObjectName("textLabel1")
self.gridlayout.addWidget(self.textLabel1, 0, 0, 1, 1)
self.heightSpinBox = QtWidgets.QSpinBox(UMLSceneSizeDialog)
self.heightSpinBox.setMinimum(100)
self.heightSpinBox.setMaximum(100000)
self.heightSpinBox.setObjectName("heightSpinBox")
self.gridlayout.addWidget(self.heightSpinBox, 1, 1, 1, 1)
self.widthSpinBox = QtWidgets.QSpinBox(UMLSceneSizeDialog)
self.widthSpinBox.setMinimum(100)
self.widthSpinBox.setMaximum(100000)
self.widthSpinBox.setObjectName("widthSpinBox")
self.gridlayout.addWidget(self.widthSpinBox, 0, 1, 1, 1)
self.retranslateUi(UMLSceneSizeDialog)
self.buttonBox.accepted.connect(UMLSceneSizeDialog.accept)
self.buttonBox.rejected.connect(UMLSceneSizeDialog.reject)
QtCore.QMetaObject.connectSlotsByName(UMLSceneSizeDialog)
def retranslateUi(self, UMLSceneSizeDialog):
_translate = QtCore.QCoreApplication.translate
UMLSceneSizeDialog.setWindowTitle(_translate("UMLSceneSizeDialog", "Set Size"))
self.textLabel2.setText(_translate("UMLSceneSizeDialog", "Height (in pixels):"))
self.textLabel1.setText(_translate("UMLSceneSizeDialog", "Width (in pixels):"))
self.heightSpinBox.setToolTip(_translate("UMLSceneSizeDialog", "Select the height of the diagram"))
self.widthSpinBox.setToolTip(_translate("UMLSceneSizeDialog", "Select the width of the diagram"))
|
gpl-3.0
| -7,273,575,584,148,077,000 | 50.867925 | 107 | 0.731539 | false | 3.765753 | false | false | false |
mozilla/mozilla-ignite
|
apps/challenges/migrations/0017_auto__add_field_phase_start_date__add_field_phase_end_date.py
|
1
|
13053
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Phase.start_date'
db.add_column('challenges_phase', 'start_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 12, 13, 17, 6, 37, 831418)), keep_default=False)
# Adding field 'Phase.end_date'
db.add_column('challenges_phase', 'end_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2012, 6, 13, 17, 6, 37, 831477)), keep_default=False)
def backwards(self, orm):
# Deleting field 'Phase.start_date'
db.delete_column('challenges_phase', 'start_date')
# Deleting field 'Phase.end_date'
db.delete_column('challenges_phase', 'end_date')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'challenges.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
},
'challenges.challenge': {
'Meta': {'object_name': 'Challenge'},
'allow_voting': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'moderate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'summary': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'})
},
'challenges.externallink': {
'Meta': {'object_name': 'ExternalLink'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Submission']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255'})
},
'challenges.phase': {
'Meta': {'ordering': "('order',)", 'unique_together': "(('challenge', 'name'),)", 'object_name': 'Phase'},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'phases'", 'to': "orm['challenges.Challenge']"}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 6, 13, 17, 6, 37, 831477)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2011, 12, 13, 17, 6, 37, 831418)'})
},
'challenges.submission': {
'Meta': {'ordering': "['-id']", 'object_name': 'Submission'},
'brief_description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['challenges.Category']", 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.Profile']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2011, 12, 14, 1, 6, 37, 834376)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'flagged_offensive': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'flagged_offensive_reason': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_live': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_winner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'phase': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Phase']"}),
'sketh_note': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'projects.project': {
'Meta': {'object_name': 'Project'},
'allow_participation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allow_sub_projects': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'featured_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects_following'", 'symmetrical': 'False', 'to': "orm['users.Profile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'long_description': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent_project_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'sub_project_label': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'team_members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['users.Profile']", 'symmetrical': 'False'}),
'topics': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['topics.Topic']", 'symmetrical': 'False'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
},
'topics.topic': {
'Meta': {'object_name': 'Topic'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'draft': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'long_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'users.profile': {
'Meta': {'object_name': 'Profile'},
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'featured_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['challenges']
|
bsd-3-clause
| -5,817,836,040,785,347,000 | 75.782353 | 185 | 0.553742 | false | 3.653233 | false | false | false |
akellne/toolshed
|
plugins/ifi.py
|
1
|
4495
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import datetime
import urllib2
import re
import json
from base import Plugin
#URL to the ifi news ics file
URL = "http://webhelper.informatik.uni-goettingen.de/editor/ical/ifinews.ics"
#dateformat used in ics files (date with and without time)
ICS_UTC="%Y%m%dT%H%M%SZ"
ICS_DATE="%Y%m%d"
#hours that need to be shifted to turn the UTC times from the ics file into local times
TIME_SHIFT = datetime.timedelta(hours=2)
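# Quick sketch of what these format strings parse (example values are made up):
#   datetime.datetime.strptime("20240131T170000Z", ICS_UTC)  # -> 2024-01-31 17:00:00
#   datetime.datetime.strptime("20240131", ICS_DATE)         # -> 2024-01-31 00:00:00
# TIME_SHIFT is then added to the parsed UTC timestamps to get local wall-clock times.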
class IfINews(Plugin):
"""
class to parse the ics calendar of the IfI webpage
"""
NAME = "IfI News"
AUTHOR = "[email protected]"
VERSION = (0, 0, 1)
ENABLED = True
HELP = "!ifi shows the cureent ifi news"
CHANNELS = []
def __init__(
self, ircbot, cache_time=datetime.timedelta(hours=1),
random_message=[None, None]
):
Plugin.__init__(self, ircbot, cache_time, random_message)
def on_privmsg(self, msg, *params):
Plugin.on_privmsg(self, msg, *params)
if not self.is_in_channel(params[0]):
#plugin not available in the channel => return
return
if msg == "!ifi":
self.ircbot.switch_personality(nick="chiefsec")
#get data from cache
reload_data, self.data = self.load_cache()
if reload_data:
#reload the data, if too old
self.data = self._get_news()
self.save_cache(data=self.data)
else:
self.data = self.data.encode("utf-8")
message = "--- IfI News: ---\n"
if self.data:
message += self.data
else:
message += "there are currently no news!"
            #finally, send the message to the channel and restore the bot's nick
self.ircbot.privmsg(params[0], message)
self.ircbot.reset_personality()
def _get_news(self):
"""
load ifi news from ifi webpage's ics file
"""
#load url and parse it with simple regex
f = urllib2.urlopen(URL)
ics = f.read()
#parse ics data
li = []
for res in re.compile(
r'BEGIN:VEVENT(.*?)END:VEVENT', re.I|re.S
).findall(ics):
#parse every calendar item found
item = {}
for line in res.split("\n"):
if line.strip():
#replace stuff for all day events that use another format
for x in ("DTSTART", "DTEND"):
line = line.replace(
"%s;VALUE=DATE-TIME" % x,
"%s" % x
)
k, _, v = line.partition(":")
if k in ("SUMMARY", "DTSTART", "DTEND"):
if k == "SUMMARY":
item[k.lower()] = v.strip()
else:
try:
#try to parse date and time
item[k.lower()] = datetime.datetime.strptime(
v.strip(), ICS_UTC
) + TIME_SHIFT
item["onlydate"] = False
except Exception:
try:
#try to parse only date
item[k.lower()] = datetime.datetime.strptime(
v.strip(), ICS_DATE
)
item["onlydate"] = True
except Exception:
pass
li.append(item)
#build message
tmp = ""
for item in sorted(li, key=lambda item: item["dtstart"]):
if item["dtstart"] >= datetime.datetime.today():
if item["onlydate"] is False:
tmp += "%sh to %sh: %s\n" % (
item["dtstart"].strftime("%a %d. %b %Y, %H:%M"),
item["dtend"].strftime("%H:%M"),
item["summary"].replace("\\", "")
)
else:
tmp += "%sh %s\n" % (
item["dtstart"].strftime("%a %d. %b %Y"),
item["summary"].replace("\\", "")
)
return tmp.decode("latin-1").encode("utf-8")
|
gpl-3.0
| -3,895,263,639,573,684,700 | 32.051471 | 81 | 0.441824 | false | 4.293219 | false | false | false |
cherry-wb/SideTools
|
examples/mainwindows/sdi/sdi.py
|
1
|
11026
|
#!/usr/bin/env python
############################################################################
#
# Copyright (C) 2004-2005 Trolltech AS. All rights reserved.
#
# This file is part of the example classes of the Qt Toolkit.
#
# This file may be used under the terms of the GNU General Public
# License version 2.0 as published by the Free Software Foundation
# and appearing in the file LICENSE.GPL included in the packaging of
# this file. Please review the following information to ensure GNU
# General Public Licensing requirements will be met:
# http://www.trolltech.com/products/qt/opensource.html
#
# If you are unsure which license is appropriate for your use, please
# review the following information:
# http://www.trolltech.com/products/qt/licensing.html or contact the
# sales department at [email protected].
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
############################################################################
# This is only needed for Python v2 but is harmless for Python v3.
#import sip
#sip.setapi('QVariant', 2)
from PySide import QtCore, QtGui
import sdi_rc
class MainWindow(QtGui.QMainWindow):
sequenceNumber = 1
windowList = []
def __init__(self, fileName=None):
super(MainWindow, self).__init__()
self.init()
if fileName:
self.loadFile(fileName)
else:
self.setCurrentFile('')
def closeEvent(self, event):
if self.maybeSave():
self.writeSettings()
event.accept()
else:
event.ignore()
def newFile(self):
other = MainWindow()
MainWindow.windowList.append(other)
other.move(self.x() + 40, self.y() + 40)
other.show()
def open(self):
fileName, filtr = QtGui.QFileDialog.getOpenFileName(self)
if fileName:
existing = self.findMainWindow(fileName)
if existing:
existing.show()
existing.raise_()
existing.activateWindow()
return
if self.isUntitled and self.textEdit.document().isEmpty() and not self.isWindowModified():
self.loadFile(fileName)
else:
other = MainWindow(fileName)
if other.isUntitled:
del other
return
MainWindow.windowList.append(other)
other.move(self.x() + 40, self.y() + 40)
other.show()
def save(self):
if self.isUntitled:
return self.saveAs()
else:
return self.saveFile(self.curFile)
def saveAs(self):
fileName, filtr = QtGui.QFileDialog.getSaveFileName(self, "Save As",
self.curFile)
if not fileName:
return False
return self.saveFile(fileName)
def about(self):
QtGui.QMessageBox.about(self, "About SDI",
"The <b>SDI</b> example demonstrates how to write single "
"document interface applications using Qt.")
def documentWasModified(self):
self.setWindowModified(True)
def init(self):
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.isUntitled = True
self.textEdit = QtGui.QTextEdit()
self.setCentralWidget(self.textEdit)
self.createActions()
self.createMenus()
self.createToolBars()
self.createStatusBar()
self.readSettings()
self.textEdit.document().contentsChanged.connect(self.documentWasModified)
def createActions(self):
self.newAct = QtGui.QAction(QtGui.QIcon(':/images/new.png'), "&New",
self, shortcut=QtGui.QKeySequence.New,
statusTip="Create a new file", triggered=self.newFile)
self.openAct = QtGui.QAction(QtGui.QIcon(':/images/open.png'),
"&Open...", self, shortcut=QtGui.QKeySequence.Open,
statusTip="Open an existing file", triggered=self.open)
self.saveAct = QtGui.QAction(QtGui.QIcon(':/images/save.png'),
"&Save", self, shortcut=QtGui.QKeySequence.Save,
statusTip="Save the document to disk", triggered=self.save)
self.saveAsAct = QtGui.QAction("Save &As...", self,
shortcut=QtGui.QKeySequence.SaveAs,
statusTip="Save the document under a new name",
triggered=self.saveAs)
self.closeAct = QtGui.QAction("&Close", self, shortcut="Ctrl+W",
statusTip="Close this window", triggered=self.close)
self.exitAct = QtGui.QAction("E&xit", self, shortcut="Ctrl+Q",
statusTip="Exit the application",
triggered=QtGui.qApp.closeAllWindows)
self.cutAct = QtGui.QAction(QtGui.QIcon(':/images/cut.png'), "Cu&t",
self, enabled=False, shortcut=QtGui.QKeySequence.Cut,
statusTip="Cut the current selection's contents to the clipboard",
triggered=self.textEdit.cut)
self.copyAct = QtGui.QAction(QtGui.QIcon(':/images/copy.png'),
"&Copy", self, enabled=False, shortcut=QtGui.QKeySequence.Copy,
statusTip="Copy the current selection's contents to the clipboard",
triggered=self.textEdit.copy)
self.pasteAct = QtGui.QAction(QtGui.QIcon(':/images/paste.png'),
"&Paste", self, shortcut=QtGui.QKeySequence.Paste,
statusTip="Paste the clipboard's contents into the current selection",
triggered=self.textEdit.paste)
self.aboutAct = QtGui.QAction("&About", self,
statusTip="Show the application's About box",
triggered=self.about)
self.aboutQtAct = QtGui.QAction("About &Qt", self,
statusTip="Show the Qt library's About box",
triggered=QtGui.qApp.aboutQt)
self.textEdit.copyAvailable.connect(self.cutAct.setEnabled)
self.textEdit.copyAvailable.connect(self.copyAct.setEnabled)
def createMenus(self):
self.fileMenu = self.menuBar().addMenu("&File")
self.fileMenu.addAction(self.newAct)
self.fileMenu.addAction(self.openAct)
self.fileMenu.addAction(self.saveAct)
self.fileMenu.addAction(self.saveAsAct)
self.fileMenu.addSeparator()
self.fileMenu.addAction(self.closeAct)
self.fileMenu.addAction(self.exitAct)
self.editMenu = self.menuBar().addMenu("&Edit")
self.editMenu.addAction(self.cutAct)
self.editMenu.addAction(self.copyAct)
self.editMenu.addAction(self.pasteAct)
self.menuBar().addSeparator()
self.helpMenu = self.menuBar().addMenu("&Help")
self.helpMenu.addAction(self.aboutAct)
self.helpMenu.addAction(self.aboutQtAct)
def createToolBars(self):
self.fileToolBar = self.addToolBar("File")
self.fileToolBar.addAction(self.newAct)
self.fileToolBar.addAction(self.openAct)
self.fileToolBar.addAction(self.saveAct)
self.editToolBar = self.addToolBar("Edit")
self.editToolBar.addAction(self.cutAct)
self.editToolBar.addAction(self.copyAct)
self.editToolBar.addAction(self.pasteAct)
def createStatusBar(self):
self.statusBar().showMessage("Ready")
def readSettings(self):
settings = QtCore.QSettings('Trolltech', 'SDI Example')
pos = settings.value('pos', QtCore.QPoint(200, 200))
size = settings.value('size', QtCore.QSize(400, 400))
self.move(pos)
self.resize(size)
def writeSettings(self):
settings = QtCore.QSettings('Trolltech', 'SDI Example')
settings.setValue('pos', self.pos())
settings.setValue('size', self.size())
def maybeSave(self):
if self.textEdit.document().isModified():
ret = QtGui.QMessageBox.warning(self, "SDI",
"The document has been modified.\nDo you want to save "
"your changes?",
QtGui.QMessageBox.Save | QtGui.QMessageBox.Discard |
QtGui.QMessageBox.Cancel)
if ret == QtGui.QMessageBox.Save:
return self.save()
elif ret == QtGui.QMessageBox.Cancel:
return False
return True
def loadFile(self, fileName):
file = QtCore.QFile(fileName)
if not file.open( QtCore.QFile.ReadOnly | QtCore.QFile.Text):
QtGui.QMessageBox.warning(self, "SDI",
"Cannot read file %s:\n%s." % (fileName, file.errorString()))
return
instr = QtCore.QTextStream(file)
QtGui.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
self.textEdit.setPlainText(instr.readAll())
QtGui.QApplication.restoreOverrideCursor()
self.setCurrentFile(fileName)
self.statusBar().showMessage("File loaded", 2000)
def saveFile(self, fileName):
file = QtCore.QFile(fileName)
if not file.open( QtCore.QFile.WriteOnly | QtCore.QFile.Text):
QtGui.QMessageBox.warning(self, "SDI",
"Cannot write file %s:\n%s." % (fileName, file.errorString()))
return False
outstr = QtCore.QTextStream(file)
QtGui.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
outstr << self.textEdit.toPlainText()
QtGui.QApplication.restoreOverrideCursor()
self.setCurrentFile(fileName)
self.statusBar().showMessage("File saved", 2000)
return True
def setCurrentFile(self, fileName):
self.isUntitled = not fileName
if self.isUntitled:
self.curFile = "document%d.txt" % MainWindow.sequenceNumber
MainWindow.sequenceNumber += 1
else:
self.curFile = QtCore.QFileInfo(fileName).canonicalFilePath()
self.textEdit.document().setModified(False)
self.setWindowModified(False)
self.setWindowTitle("%s[*] - SDI" % self.strippedName(self.curFile))
def strippedName(self, fullFileName):
return QtCore.QFileInfo(fullFileName).fileName()
def findMainWindow(self, fileName):
canonicalFilePath = QtCore.QFileInfo(fileName).canonicalFilePath()
for widget in QtGui.qApp.topLevelWidgets():
if isinstance(widget, MainWindow) and widget.curFile == canonicalFilePath:
return widget
return None
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
mainWin = MainWindow()
mainWin.show()
sys.exit(app.exec_())
|
apache-2.0
| -1,914,550,978,136,689,000 | 35.376271 | 102 | 0.597497 | false | 4.170197 | false | false | false |
Zefiros-Software/Zefiros-Bot
|
bot/slack_clients.py
|
1
|
8173
|
import logging
import re
import time
import json
import traceback
import os
import raven
from slacker import Slacker
from slackclient import SlackClient
from asq.initiators import *
from phabricator import Phabricator
from functools import wraps
class memo:
def __init__(self, fn):
self.fn = fn
self.memo = {}
def __get__(self, obj, objtype):
"""Support instance methods."""
import functools
return functools.partial(self.__call__, obj)
    def __call__(self, *args, **kwds):
        import pickle
        # build a hashable cache key from the pickled arguments (args[0] is 'self')
        key = pickle.dumps(args[1:], -1) + pickle.dumps(kwds, -1)
        if key not in self.memo:
            result = self.fn(*args, **kwds)
            if result:
                self.memo[key] = result
            else:
                # falsy results are not cached, so they get recomputed next time
                return result
        return self.memo[key]
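# Usage sketch: @memo caches a method's return value keyed on its pickled arguments
# ('self' is deliberately excluded), so repeated lookups skip the remote call.
#
#   class Directory:                          # hypothetical example class
#       @memo
#       def lookup(self, name):
#           return expensive_remote_call(name)   # cached after the first call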
logger = logging.getLogger(__name__)
class SlackClients(object):
def __init__(self, token, phabuser, phabtoken, phabhost):
#logger.debug('Starting client with token: {}, {}, {}, {}'.format(token, phabuser, phabtoken, phabhost))
self.token = token
# Slacker is a Slack Web API Client
self.web = Slacker(token)
# SlackClient is a Slack Websocket RTM API Client
self.rtm = SlackClient(token)
sentryKey = str(os.getenv("SENTRY_KEY", "")).strip()
sentrySecret = str(os.getenv("SENTRY_SECRET", "")).strip()
sentryUrl = str(os.getenv("SENTRY_URL", "")).strip()
self.sentry = raven.Client( 'https://{}:{}@{}'.format(sentryKey, sentrySecret, sentryUrl) )
self.phab = Phabricator(username=phabuser, token=phabtoken, host=phabhost)
self.phab.update_interfaces()
self.priorityTranslations = {100: 'Unbreak Now!', 90: 'Needs Triage', 80: 'High', 50: 'Normal', 25: 'Low', 0: 'Wishlist'}
def queryPhabFeed(self, after):
return self.phab.feed.query( after=after, view="text" )
def queryPhabManiphest(self, owners, **kwargs):
return self.phab.maniphest.query(ownerPHIDs=owners, order="order-priority", status="status-open", limit=5, **kwargs)
def findPhabIssue(self, issue):
return self.phab.maniphest.info( task_id=issue )
def createPhabTask(self, title, creators, projects):
auxiliary = { 'std:maniphest:zefiros:isFromSlack': True, 'std:maniphest:zefiros:creators': creators }
if projects:
return self.phab.maniphest.createtask( title=title ,ccPHIDs=creators, projectPHIDs=projects, auxiliary=auxiliary)
return self.phab.maniphest.createtask( title=title, ccPHIDs=creators, auxiliary=auxiliary)
def setPhabTask(self, task, **kwargs):
return self.phab.maniphest.update( phid=task, **kwargs )
def getPriorityLabelByValue(self, priorityValue):
return self.priorityTranslations.get( priorityValue )
def findPhabIssueByPHID(self, issuePHID):
issues = self.phab.maniphest.query( phids=[issuePHID] )
if len( issues ) == 0:
return None
return issues[issuePHID]
def findPhabRepository(self, repositoryPHID):
return query( list( self.phab.phid.lookup( names=[repositoryPHID] ).items() ) ).where( lambda tuple: repositoryPHID in tuple[0] ).select( lambda tuple: tuple[1] ).to_list()[0]
def findPhabRepositoryInfo( self, repositoryPhid):
return query( list(self.phab.diffusion.repository.search( phids=[repositoryPhid] )["data"].items() ) ).select( lambda repo: repo["fields"] ).to_list()[0]
def findPhabCommits( self, commitPHIDs):
commits = self.phab.diffusion.querycommits( phids=commitPHIDs )["data"]
if len( commits ) == 0:
return None
return query( list( commits.items() ) ).select( lambda tuple: tuple[1] ).to_list()
def findWorkboardColumn(self, columnPHID):
return query( list( self.phab.phid.lookup( names=[columnPHID] ).items() ) ).where( lambda tuple: columnPHID in tuple[0] ).select( lambda tuple: tuple[1] ).to_list()[0]
@memo
def findPhabUsers(self, userIds):
return self.phab.user.query( phids=userIds )
@memo
def findPhabProject(self, names):
projects = self.phab.project.query(names=names)
if len( projects ) == 0:
return None
return projects
@memo
def findPhabProjectsByPHID(self, phids):
projects = self.phab.project.query(phids=phids)['data']
if len( projects ) == 0:
return None
return list( projects.values() )
@memo
def findSlackUsersToPhab(self, userNames):
userNames = self.findSlackUserNames( userNames )
users = self.phab.user.query(usernames=userNames)
return query(users).select(lambda u: u['phid'] ).to_list()
@memo
def findSlackUserNames(self, userNames):
userList = json.loads(self.rtm.server.api_call( "users.list", presence=0 ))
return query(userList['members']).where(lambda im: "email" in im['profile'] and im['profile']['email'] is not None) \
.where(lambda im: im['name'] in userNames) \
.select(lambda im: im['profile']['email'].split('@')[0]).to_list()
def findManiphestTransactions(self, taskId, transactionPHIDs):
transactions = self.phab.maniphest.gettasktransactions( ids=[taskId] )
return query(transactions["{}".format(taskId)]).where(lambda t: t['transactionType'] != 'core:customfield' and t['transactionType'] != 'core:edge' and t['transactionType'] != 'core:subscribers' and any(t['transactionPHID'] in transactionPHID for transactionPHID in transactionPHIDs)).to_list()
@memo
def findPhabUsersToSlack(self, userIds):
users = self.findPhabUsers(userIds)
users = self.findUsers( query(users).select(lambda u: u['userName'] ).to_list() )
if users:
return query(users).select(lambda u: u['id'] ).to_list()
else:
return None
@memo
def findUsers(self, users):
userList = json.loads(self.rtm.server.api_call( "users.list", presence=0 ))
users = query(userList['members']).where(lambda im: "email" in im['profile'] and im['profile']['email'] is not None).where(lambda im: im['profile']['email'].split('@')[0] in users).to_list()
if len( users ) == 0:
return None
return users
@memo
def findChannelId(self, names):
channelsList = json.loads(self.rtm.server.api_call( "channels.list", exclude_archived=1 ))
channels = query(channelsList['channels']).where(lambda im: im['name'].lower() in names.lower()).select(lambda u: u['id'] ).to_list()
if len( channels ) == 0:
return None
return channels
def botUserId(self):
return self.rtm.server.login_data['self']['id']
def isMessageFromMe(self, user):
return user == self.rtm.server.login_data['self']['id']
@memo
def isDirectMessage(self, channel):
imList = json.loads(self.rtm.server.api_call( "im.list" ))
return len(query(imList['ims']).where(lambda im: im['id'] == channel).to_list()) > 0
@memo
def isBot(self, user):
userInf = json.loads(self.rtm.server.api_call( "users.info", user=user ))
return "is_bot" in userInf['user'] and userInf['user']['is_bot']
def isBotMention(self, message):
botUserName = self.rtm.server.login_data['self']['id']
if re.search("@{}".format(botUserName), message):
return True
else:
return False
def sendUserTypingPause(self, channelId, sleepTime=3.0):
userTypingJson = {"type": "typing", "channel": channelId}
self.rtm.server.send_to_websocket(userTypingJson)
time.sleep(sleepTime)
def logExceptionToSlack(self, e):
logger.exception( e )
self.sentry.captureException()
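# Instantiation sketch (credentials below are placeholders, not real values):
#
#   clients = SlackClients(
#       token="xoxb-...",                          # Slack bot token
#       phabuser="bot",                            # Phabricator username
#       phabtoken="api-...",                       # Conduit API token
#       phabhost="https://phab.example.com/api/",
#   )
#   tasks = clients.queryPhabManiphest(owners=["PHID-USER-..."])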
|
mit
| 7,156,433,444,986,988,000 | 38.261084 | 301 | 0.608345 | false | 3.66996 | false | false | false |
Wilsh/goldwarsplus
|
commerce/models.py
|
1
|
13576
|
from django.db import models
from django.utils import timezone
from urllib.request import urlretrieve
import hashlib
import os
from django.conf import settings
from math import ceil
# Create your models here.
class Item(models.Model):
'''All items discovered by players in the game'''
item_id = models.PositiveIntegerField(primary_key=True)
chat_link = models.CharField(max_length=120, default='')
name = models.CharField(max_length=200, default='[no name provided]')
icon = models.ForeignKey('Icon', on_delete=models.CASCADE)
description = models.TextField(default='No description provided')
type = models.CharField(max_length=20, default='')
rarity = models.CharField(max_length=10, default='')
level = models.PositiveSmallIntegerField(default=0)
vendor_value = models.PositiveIntegerField(default=0)
seen_on_trading_post = models.BooleanField(default=False)
can_be_crafted = models.BooleanField(default=False)
can_purchase_from_vendor = models.BooleanField(default=False)
vendor_price = models.PositiveIntegerField(default=0)
date_added = models.DateTimeField()
class Meta:
ordering = ["-date_added"]
def __str__(self):
return "Item " + str(self.item_id) + ": " + self.name
def add_details(self, itemdetails):
self.item_id = itemdetails['id']
self.chat_link = itemdetails['chat_link']
if itemdetails['name'] != '':
self.name = itemdetails['name']
try:
if itemdetails['description'] != '':
self.description = itemdetails['description']
except KeyError:
pass
self.type = itemdetails['type']
self.rarity = itemdetails['rarity']
self.level = itemdetails['level']
self.vendor_value = itemdetails['vendor_value']
self.date_added = timezone.now()
def get_market_buy(self, quantity=1):
'''Return the cost of the quantity of this item if bought on the trading post'''
sell_orders = self.selllisting_set.all().order_by('unit_price')
total = 0
count = 0
for order in sell_orders:
if (order.quantity + count) < quantity:
count += order.quantity
total += order.quantity * order.unit_price
else:
total += (quantity - count) * order.unit_price
return total
#quantity not available
return 0
def get_market_sell(self):
'''Return the value of this item if sold immediately on the trading post'''
buy_order = self.buylisting_set.order_by('-unit_price').first()
return buy_order.unit_price if buy_order else 0
def get_market_delay_sell(self):
'''Return the value of this item if sold one copper below the lowest current
selling price on the trading post. Returns 0 if none of these items are listed'''
sell_order = self.selllisting_set.order_by('unit_price').first()
return sell_order.unit_price - 1 if sell_order else 0
def buy_or_craft(self, quantity=1):
'''Return the cheapest method to obtain this Item as a nested list of
Items designated as 'buy' or 'craft' depending upon whether it is cheaper
to buy that Item on the trading post or craft the Item after buying its
base components'''
purchase_price = self.get_market_buy(quantity)
if purchase_price == 0: #not available
purchase_price = 9999999999
if not self.can_be_crafted:
return ['buy', purchase_price, quantity, [self.item_id, self.name]]
recipe_id_list = []
recipe_name_list = []
cheapest_recipe_idx = 0
ingredient_list = []
crafting_price = 0
num_recipes = 0
for recipe in self.recipe_set.all():
ingredient_sublist = []
recipe_id_list.append(recipe.recipe_id)
recipe_name_list.append([recipe.output_item_id, recipe.output_item_id.name])
for ingredient in recipe.recipeingredient_set.all():
should_buy = ingredient.item_id.buy_or_craft(ceil(ingredient.count / recipe.output_item_count))
if should_buy[0] == 'buy':
cost_multiplier = 1
else:
cost_multiplier = ceil(ingredient.count / recipe.output_item_count)
ingredient_sublist.append([should_buy, cost_multiplier])
ingredient_list.append(ingredient_sublist)
num_recipes += 1
if num_recipes > 1:
ingredient_list, cheapest_recipe_idx, crafting_price = self.get_cheapest_recipe(ingredient_list)
else:
ingredient_list = ingredient_list[0]
for ingredient, count in ingredient_list:
crafting_price += self.get_component_cost(ingredient, count)
if crafting_price < purchase_price:
return ['craft', crafting_price, quantity, ingredient_list, recipe_name_list[cheapest_recipe_idx], recipe_id_list[cheapest_recipe_idx]]
else:
return ['buy', purchase_price, quantity, [self.item_id, self.name]]
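    # Return-shape sketch for buy_or_craft (all numbers and names are illustrative):
    #   ['buy', 1520, 5, [19721, 'Bolt of Cotton']]
    #       -> buying 5 on the trading post is cheapest, 1520 copper in total
    #   ['craft', 980, 1, [[['buy', ...], 3], ...], [<Item>, 'Bolt of Cotton'], 7807]
    #       -> crafting via recipe 7807 is cheaper; the nested list holds the
    #          buy-or-craft decision and count multiplier for each ingredient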
def get_cheapest_recipe(self, recipe_list):
'''Given a list of lists of ingredients for multiple Recipes, return
the list of Recipe ingredients that are the cheapest to obtain along
with the index of the recipe_list containing the cheapest ingredients
and the total cost of those ingredients.
Intended for Items that can be crafted by more than one Recipe'''
cheapest_idx = 0
current_idx = 0
cheapest_price = 9999999999
for ingredient_list in recipe_list:
crafting_price = 0
for ingredient, count in ingredient_list:
crafting_price += self.get_component_cost(ingredient, count)
if crafting_price < cheapest_price:
cheapest_price = crafting_price
cheapest_idx = current_idx
current_idx += 1
return (recipe_list[cheapest_idx], cheapest_idx, cheapest_price)
def get_component_cost(self, list, num_items):
'''Return the cost of an Item in a list instantiated by buy_or_craft'''
cost = 0
if list[0] == 'buy' or list[0] == 'craft':
cost = list[1] * num_items
return cost
class ItemFlag(models.Model):
'''Flags applying to an Item'''
for_item = models.OneToOneField('Item', on_delete=models.CASCADE)
AccountBindOnUse = models.BooleanField(default=False)
AccountBound = models.BooleanField(default=False)
HideSuffix = models.BooleanField(default=False)
MonsterOnly = models.BooleanField(default=False)
NoMysticForge = models.BooleanField(default=False)
NoSalvage = models.BooleanField(default=False)
NoSell = models.BooleanField(default=False)
NotUpgradeable = models.BooleanField(default=False)
NoUnderwater = models.BooleanField(default=False)
SoulbindOnAcquire = models.BooleanField(default=False)
SoulBindOnUse = models.BooleanField(default=False)
Unique = models.BooleanField(default=False)
class Meta:
ordering = ["for_item"]
def __str__(self):
return "Flags for item " + str(self.for_item.item_id) + ": " + self.for_item.name
def add_details(self, flaglist):
for entry in flaglist:
setattr(self, entry, True)
class EconomicsForItem(models.Model):
'''Economic data applying to an Item that can be found on the trading post'''
for_item = models.OneToOneField('Item', on_delete=models.CASCADE)
price_change_count = models.PositiveIntegerField(default=0)
relist_profit = models.PositiveIntegerField(default=0)
def __str__(self):
return "Economic data for Item " + str(self.for_item.item_id) + ": " + self.for_item.name
class Icon(models.Model):
'''Icons used for Items'''
url = models.CharField(primary_key=True, max_length=120)
static_id = models.CharField(max_length=36, default='unknown.png')
def __str__(self):
return "Icon for Items " + self.static_id
def add_details(self):
self.static_id = hashlib.md5(self.url.encode('utf-8')).hexdigest()
self.static_id += '.png'
urlretrieve(self.url, os.path.join(settings.BASE_DIR, 'commerce/static/commerce/items/') + self.static_id)
class Recipe(models.Model):
'''All recipes for craftable Items discovered by
players in the game'''
recipe_id = models.PositiveIntegerField(primary_key=True)
type = models.CharField(max_length=30, default='')
output_item_id = models.ForeignKey('Item', on_delete=models.CASCADE)
output_item_count = models.PositiveSmallIntegerField(default=0)
min_rating = models.PositiveSmallIntegerField(default=0)
AutoLearned = models.BooleanField(default=False)
LearnedFromItem = models.BooleanField(default=False)
date_added = models.DateTimeField()
class Meta:
ordering = ["-date_added"]
def __str__(self):
return "Recipe for item " + str(self.output_item_id.item_id) + ": " + self.output_item_id.name
def add_details(self, recipedetails):
self.recipe_id = recipedetails['id']
self.type = recipedetails['type']
self.output_item_count = recipedetails['output_item_count']
self.min_rating = recipedetails['min_rating']
for entry in recipedetails['flags']:
setattr(self, entry, True)
self.date_added = timezone.now()
class EconomicsForRecipe(models.Model):
'''Economic data applying to a Recipe'''
for_recipe = models.OneToOneField('Recipe', on_delete=models.CASCADE)
limited_production = models.BooleanField(default=False)
ingredient_cost = models.PositiveIntegerField(default=0)
fast_crafting_profit = models.IntegerField(default=0)
delayed_crafting_profit = models.IntegerField(default=0)
def __str__(self):
return "Economic data for Recipe " + str(self.for_recipe.recipe_id) + ": " + self.for_recipe.output_item_id.name
class RecipeDiscipline(models.Model):
'''Discipline flags applying to a Recipe'''
for_recipe = models.OneToOneField('Recipe', on_delete=models.CASCADE)
Artificer = models.BooleanField(default=False)
Armorsmith = models.BooleanField(default=False)
Chef = models.BooleanField(default=False)
Huntsman = models.BooleanField(default=False)
Jeweler = models.BooleanField(default=False)
Leatherworker = models.BooleanField(default=False)
Tailor = models.BooleanField(default=False)
Weaponsmith = models.BooleanField(default=False)
Scribe = models.BooleanField(default=False)
class Meta:
ordering = ["for_recipe"]
def __str__(self):
return "Disciplines for recipe " + str(self.for_recipe.recipe_id) + ": " + self.for_recipe.output_item_id.name
def add_details(self, disciplines):
for entry in disciplines:
setattr(self, entry, True)
def get_disciplines(self):
disciplines = []
disciplines.append(['Artificer', self.Artificer])
disciplines.append(['Armorsmith', self.Armorsmith])
disciplines.append(['Chef', self.Chef])
disciplines.append(['Huntsman', self.Huntsman])
disciplines.append(['Jeweler', self.Jeweler])
disciplines.append(['Leatherworker', self.Leatherworker])
disciplines.append(['Tailor', self.Tailor])
disciplines.append(['Weaponsmith', self.Weaponsmith])
disciplines.append(['Scribe', self.Scribe])
return disciplines
class RecipeIngredient(models.Model):
'''An Item and its quantity required for a Recipe'''
for_recipe = models.ForeignKey('Recipe', on_delete=models.CASCADE)
item_id = models.ForeignKey('Item', on_delete=models.CASCADE)
count = models.PositiveSmallIntegerField()
class Meta:
ordering = ["for_recipe"]
def __str__(self):
return "Ingredient for recipe " + str(self.for_recipe.recipe_id) + ": " + self.for_recipe.output_item_id.name
def add_details(self, ingredient):
self.count = ingredient['count']
class BuyListing(models.Model):
'''A buy order for an Item listed on the trading post'''
for_item = models.ForeignKey('Item', on_delete=models.CASCADE)
quantity = models.PositiveIntegerField()
unit_price = models.PositiveIntegerField()
date_added = models.DateTimeField()
class Meta:
ordering = ["-unit_price"]
def __str__(self):
return "Buy order for item " + str(self.for_item.item_id) + ": " + self.for_item.name + " at price: " + str(self.unit_price)
def add_details(self, listing):
self.quantity = listing['quantity']
self.unit_price = listing['unit_price']
self.date_added = timezone.now()
class SellListing(models.Model):
'''A sell order for an Item listed on the trading post'''
for_item = models.ForeignKey('Item', on_delete=models.CASCADE)
quantity = models.PositiveIntegerField()
unit_price = models.PositiveIntegerField()
date_added = models.DateTimeField()
class Meta:
ordering = ["unit_price"]
def __str__(self):
return "Sell order for item " + str(self.for_item.item_id) + ": " + self.for_item.name + " at price: " + str(self.unit_price)
def add_details(self, listing):
self.quantity = listing['quantity']
self.unit_price = listing['unit_price']
self.date_added = timezone.now()
|
mit
| -482,726,655,842,744,700 | 42.373802 | 147 | 0.648129 | false | 3.781616 | false | false | false |
marscher/mdtraj
|
MDTraj/utils/unit/quantity.py
|
1
|
27812
|
#!/usr/bin/env python
"""
Module simtk.unit.quantity
Physical quantities with units, intended to produce similar functionality
to Boost.Units package in C++ (but with a runtime cost).
Uses similar API as Scientific.Physics.PhysicalQuantities
but different internals to satisfy our local requirements.
In particular, there is no underlying set of 'canonical' base
units, whereas in Scientific.Physics.PhysicalQuantities all
units are secretly in terms of SI units. Also, it is easier
to add new fundamental dimensions to simtk.dimensions. You
might want to make new dimensions for, say, "currency" or
"information".
Some features of this implementation:
* Quantities are a combination of a value and a unit. The value
part can be any python type, including numbers, lists, numpy
arrays, and anything else. The unit part must be a simtk.unit.Unit.
* Operations like adding incompatible units raises an error.
* Multiplying or dividing units/quantities creates new units.
* Users can create new Units and Dimensions, but most of the useful
ones are predefined.
* Conversion factors between units are applied transitively, so all
possible conversions are available.
* I want dimensioned Quantities that are compatible with numpy arrays,
but do not necessarily require the python numpy package. In other
words, Quantities can be based on either numpy arrays or on built in
python types.
* Units are NOT necessarily stored in terms of SI units internally.
This is very important for me, because one important application
area for us is at the molecular scale. Using SI units internally
can lead to exponent overflow in commonly used molecular force
calculations. Internally, all unit systems are equally fundamental
in SimTK.
Two possible enhancements that have not been implemented are
1) Include uncertainties with propagation of errors
2) Incorporate offsets for celsius <-> kelvin conversion
This is part of the OpenMM molecular simulation toolkit originating from
Simbios, the NIH National Center for Physics-Based Simulation of
Biological Structures at Stanford, funded under the NIH Roadmap for
Medical Research, grant U54 GM072970. See https://simtk.org.
Portions copyright (c) 2012 Stanford University and the Authors.
Authors: Christopher M. Bruns
Contributors: Peter Eastman
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import division
__author__ = "Christopher M. Bruns"
__version__ = "0.5"
import math
import copy
from .standard_dimensions import *
from .unit import Unit, is_unit, dimensionless
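# Usage sketch (unit objects such as ``meters`` and ``seconds`` are assumed to come
# from this package's unit definition module, imported alongside this file):
#
#   v = Quantity(1.3, meters / seconds)       # 1.3 m/s
#   d = v * Quantity(2.0, seconds)            # units combine/cancel on multiplication
#   x = d.value_in_unit(centimeters)          # plain number in the requested unit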
class Quantity(object):
"""Physical quantity, such as 1.3 meters per second.
Quantities contain both a value, such as 1.3; and a unit,
such as 'meters per second'.
Supported value types include:
1 - numbers (float, int, long)
2 - lists of numbers, e.g. [1,2,3]
3 - tuples of numbers, e.g. (1,2,3)
Note - unit conversions will cause tuples to be converted to lists
4 - lists of tuples of numbers, lists of lists of ... etc. of numbers
5 - numpy.arrays
Create numpy.arrays with units using the Quantity constructor, not the
multiply operator. e.g.
Quantity(numpy.array([1,2,3]), centimeters) # correct
*NOT*
numpy.array([1,2,3]) * centimeters # won't work
because numpy.arrays already overload the multiply operator for EVERYTHING.
"""
def __init__(self, value=None, unit=None):
"""
Create a new Quantity from a value and a unit.
Parameters
- value: (any type, usually a number) Measure of this quantity
- unit: (Unit) the physical unit, e.g. simtk.unit.meters.
"""
# When no unit is specified, bend over backwards to handle all one-argument possibilities
        if unit is None: # one argument version, copied from UList
if is_unit(value):
# Unit argument creates an empty list with that unit attached
unit = value
value = []
elif is_quantity(value):
# Ulist of a Quantity is just the Quantity itself
unit = value.unit
value = value._value
elif _is_string(value):
unit = dimensionless
else:
# Is value a container?
is_container = True
try:
i = iter(value)
except TypeError:
is_container = False
if is_container:
if len(value) < 1:
unit = dimensionless
else:
                        first_item = next(iter(value))
# Avoid infinite recursion for string, because a one-character
# string is its own first element
if value == first_item:
unit = dimensionless
else:
unit = Quantity(first_item).unit
# Notice that tuples, lists, and numpy.arrays can all be initialized with a list
new_container = Quantity([], unit)
for item in value:
new_container.append(Quantity(item)) # Strips off units into list new_container._value
# __class__ trick does not work for numpy.arrays
try:
import numpy
if isinstance(value, numpy.ndarray):
value = numpy.array(new_container._value)
else:
# delegate contruction to container class from list
value = value.__class__(new_container._value)
except ImportError:
# delegate contruction to container class from list
value = value.__class__(new_container._value)
else:
# Non-Quantity, non container
# Wrap in a dimensionless Quantity
unit = dimensionless
# Accept simple scalar quantities as units
if is_quantity(unit):
value = value * unit._value
unit = unit.unit
# Use empty list for unspecified values
        if value is None:
value = []
self._value = value
self.unit = unit
def __getstate__(self):
state = dict()
state['_value'] = self._value
state['unit'] = self.unit
return state
def __setstate__(self, state):
self._value = state['_value']
self.unit = state['unit']
return
def __copy__(self):
"""
Shallow copy produces a new Quantity with the shallow copy of value and the same unit.
Because we want copy operations to work just the same way they would on the underlying value.
"""
return Quantity(copy.copy(self._value), self.unit)
def __deepcopy__(self, memo):
"""
Deep copy produces a new Quantity with a deep copy of the value, and the same unit.
Because we want copy operations to work just the same way they would on the underlying value.
"""
return Quantity(copy.deepcopy(self._value, memo), self.unit)
def __getattr__(self, attribute):
"""
Delegate unrecognized attribute calls to the underlying value type.
"""
ret_val = getattr(self._value, attribute)
return ret_val
def __str__(self):
"""Printable string version of this Quantity.
Returns a string consisting of quantity number followed by unit abbreviation.
"""
return str(self._value) + ' ' + str(self.unit.get_symbol())
def __repr__(self):
"""
"""
return (Quantity.__name__ + '(value=' + repr(self._value) + ', unit=' +
str(self.unit) + ')')
def format(self, format_spec):
return format_spec % self._value + ' ' + str(self.unit.get_symbol())
def __add__(self, other):
"""Add two Quantities.
Only Quantities with the same dimensions (e.g. length)
can be added. Raises TypeError otherwise.
Parameters
- self: left hand member of sum
- other: right hand member of sum
Returns a new Quantity that is the sum of the two arguments.
"""
# can only add using like units
if not self.unit.is_compatible(other.unit):
raise TypeError('Cannot add two quantities with incompatible units "%s" and "%s".' % (self.unit, other.unit))
value = self._value + other.value_in_unit(self.unit)
unit = self.unit
return Quantity(value, unit)
def __sub__(self, other):
"""Subtract two Quantities.
Only Quantities with the same dimensions (e.g. length)
can be subtracted. Raises TypeError otherwise.
Parameters
- self: left hand member (a) of a - b.
- other: right hand member (b) of a - b.
Returns a new Quantity that is the difference of the two arguments.
"""
if not self.unit.is_compatible(other.unit):
raise TypeError('Cannot subtract two quantities with incompatible units "%s" and "%s".' % (self.unit, other.unit))
value = self._value - other.value_in_unit(self.unit)
unit = self.unit
return Quantity(value, unit)
def __eq__(self, other):
"""
"""
if not is_quantity(other):
return False
if not self.unit.is_compatible(other.unit):
return False
return self.value_in_unit(other.unit) == other._value
def __ne__(self, other):
"""
"""
return not self.__eq__(other)
def __lt__(self, other):
"""Compares two quantities.
Raises TypeError if the Quantities are of different dimension (e.g. length vs. mass)
Returns True if self < other, False otherwise.
"""
return self._value < other.value_in_unit(self.unit)
def __ge__(self, other):
return self._value >= (other.value_in_unit(self.unit))
def __gt__(self, other):
return self._value > (other.value_in_unit(self.unit))
def __le__(self, other):
return self._value <= (other.value_in_unit(self.unit))
_reduce_cache = {}
def reduce_unit(self, guide_unit=None):
"""
Combine similar component units and scale, to form an
equal Quantity in simpler units.
Returns underlying value type if unit is dimensionless.
"""
key = (self.unit, guide_unit)
if key in Quantity._reduce_cache:
(unit, value_factor) = Quantity._reduce_cache[key]
else:
value_factor = 1.0
canonical_units = {} # dict of dimensionTuple: (Base/ScaledUnit, exponent)
# Bias result toward guide units
            if guide_unit is not None:
for u, exponent in guide_unit.iter_base_or_scaled_units():
d = u.get_dimension_tuple()
if d not in canonical_units:
canonical_units[d] = [u, 0]
for u, exponent in self.unit.iter_base_or_scaled_units():
d = u.get_dimension_tuple()
# Take first unit found in a dimension as canonical
if d not in canonical_units:
canonical_units[d] = [u, exponent]
else:
value_factor *= (u.conversion_factor_to(canonical_units[d][0])**exponent)
canonical_units[d][1] += exponent
new_base_units = {}
for d in canonical_units:
u, exponent = canonical_units[d]
if exponent != 0:
assert u not in new_base_units
new_base_units[u] = exponent
# Create new unit
if len(new_base_units) == 0:
unit = dimensionless
else:
unit = Unit(new_base_units)
# There might be a factor due to unit conversion, even though unit is dimensionless
# e.g. suppose unit is meter/centimeter
if unit.is_dimensionless():
unit_factor = unit.conversion_factor_to(dimensionless)
if unit_factor != 1.0:
value_factor *= unit_factor
# print "value_factor = %s" % value_factor
unit = dimensionless
Quantity._reduce_cache[key] = (unit, value_factor)
# Create Quantity, then scale (in case value is a container)
# That's why we don't just scale the value.
result = Quantity(self._value, unit)
if value_factor != 1.0:
# __mul__ strips off dimensionless, if appropriate
result = result * value_factor
if unit.is_dimensionless():
assert unit is dimensionless # should have been set earlier in this method
if is_quantity(result):
result = result._value
return result
def __mul__(self, other):
"""Multiply a quantity by another object
Returns a new Quantity that is the product of the self * other,
unless the resulting unit is dimensionless, in which case the
underlying value type is returned, instead of a Quantity.
"""
if is_unit(other):
# print "quantity * unit"
# Many other mul/div operations delegate to here because I was debugging
# a dimensionless unit conversion problem, which I ended up fixing within
# the reduce_unit() method.
unit = self.unit * other
return Quantity(self._value, unit).reduce_unit(self.unit)
elif is_quantity(other):
# print "quantity * quantity"
# Situations where the units cancel can result in scale factors from the unit cancellation.
# To simplify things, delegate Quantity * Quantity to (Quantity * scalar) * unit
return (self * other._value) * other.unit
else:
# print "quantity * scalar"
return self._change_units_with_factor(self.unit, other, post_multiply=False)
# value type might not be commutative for multiplication
def __rmul__(self, other):
"""Multiply a scalar by a Quantity
Returns a new Quantity with the same units as self, but with the value
multiplied by other.
"""
if is_unit(other):
raise NotImplementedError('programmer is surprised __rmul__ was called instead of __mul__')
# print "R unit * quantity"
elif is_quantity(other):
# print "R quantity * quantity"
raise NotImplementedError('programmer is surprised __rmul__ was called instead of __mul__')
else:
# print "scalar * quantity"
return self._change_units_with_factor(self.unit, other, post_multiply=True)
# return Quantity(other * self._value, self.unit)
def __truediv__(self, other):
"""Divide a Quantity by another object
Returns a new Quantity, unless the resulting unit type is dimensionless,
in which case the underlying value type is returned.
"""
if is_unit(other):
# print "quantity / unit"
return self * pow(other, -1.0)
# unit = self.unit / other
# return Quantity(self._value, unit).reduce_unit(self.unit)
elif is_quantity(other):
# print "quantity / quantity"
# Delegate quantity/quantity to (quantity/scalar)/unit
return (self/other._value) / other.unit
else:
# print "quantity / scalar"
return self * pow(other, -1.0)
# return Quantity(self._value / other, self.unit)
__div__ = __truediv__
def __rtruediv__(self, other):
"""Divide a scalar by a quantity.
Returns a new Quantity. The resulting units are the inverse of the self argument units.
"""
if is_unit(other):
# print "R unit / quantity"
raise NotImplementedError('programmer is surprised __rtruediv__ was called instead of __truediv__')
elif is_quantity(other):
raise NotImplementedError('programmer is surprised __rtruediv__ was called instead of __truediv__')
else:
# print "R scalar / quantity"
return other * pow(self, -1.0)
# return Quantity(other / self._value, pow(self.unit, -1.0))
__rdiv__ = __rtruediv__
def __pow__(self, exponent):
"""Raise a Quantity to a power.
Generally both the value and the unit of the Quantity are affected by this operation.
Returns a new Quantity equal to self**exponent.
"""
return Quantity(pow(self._value, exponent), pow(self.unit, exponent))
def sqrt(self):
"""
Returns square root of a Quantity.
Raises ArithmeticError if component exponents are not even.
This behavior can be changed if you present a reasonable real life case to me.
"""
# There might be a conversion factor from taking the square root of the unit
new_value = math.sqrt(self._value)
new_unit = self.unit.sqrt()
unit_factor = self.unit.conversion_factor_to(new_unit*new_unit)
if unit_factor != 1.0:
new_value *= math.sqrt(unit_factor)
return Quantity(value=new_value, unit=new_unit)
def __abs__(self):
"""
Return absolute value of a Quantity.
The unit is unchanged. A negative value of self will result in a positive value
in the result.
"""
return Quantity(abs(self._value), self.unit)
def __pos__(self):
"""
        Returns a new Quantity carrying the unary plus of the underlying value; the unit is unchanged.
"""
return Quantity(+(self._value), self.unit)
def __neg__(self):
"""Negate a Quantity.
Returns a new Quantity with a different sign on the value.
"""
return Quantity(-(self._value), self.unit)
def __nonzero__(self):
"""Returns True if value underlying Quantity is zero, False otherwise.
"""
return bool(self._value)
def __complex__(self):
return Quantity(complex(self._value), self.unit)
def __float__(self):
return Quantity(float(self._value), self.unit)
def __int__(self):
return Quantity(int(self._value), self.unit)
def __long__(self):
return Quantity(int(self._value), self.unit)
def value_in_unit(self, unit):
"""
Returns underlying value, in the specified units.
"""
val = self.in_units_of(unit)
if is_quantity(val):
return val._value
else: # naked dimensionless
return val
def value_in_unit_system(self, system):
"""
Returns the underlying value type, after conversion to a particular unit system.
"""
result = self.in_unit_system(system)
if is_quantity(result):
return result._value
else:
return result # dimensionless
def in_unit_system(self, system):
"""
Returns a new Quantity equal to this one, expressed in a particular unit system.
"""
new_units = system.express_unit(self.unit)
f = self.unit.conversion_factor_to(new_units)
return self._change_units_with_factor(new_units, f)
def in_units_of(self, other_unit):
"""
Returns an equal Quantity expressed in different units.
If the units are the same as those in self, a reference to self is returned.
Raises a TypeError if the new unit is not compatible with the original unit.
        """
if not self.unit.is_compatible(other_unit):
raise TypeError('Unit "%s" is not compatible with Unit "%s".' % (self.unit, other_unit))
f = self.unit.conversion_factor_to(other_unit)
return self._change_units_with_factor(other_unit, f)
    def _change_units_with_factor(self, new_unit, factor, post_multiply=True):
        """
        Internal helper: return self expressed in new_unit by applying the conversion factor.
        The post_multiply argument is used in case the multiplication operation is not commutative,
        i.e. result = factor * value when post_multiply is False
        and result = value * factor when post_multiply is True.
        """
# numpy arrays cannot be compared with 1.0, so just "try"
factor_is_identity = False
try:
if (factor == 1.0):
factor_is_identity = True
except ValueError:
pass
if factor_is_identity:
# No multiplication required
if (self.unit is new_unit):
result = self
else:
result = Quantity(self._value, new_unit)
else:
try:
# multiply operator, if it exists, is preferred
if post_multiply:
value = self._value * factor # works for number, numpy.array, or vec3, e.g.
else:
value = factor * self._value # works for number, numpy.array, or vec3, e.g.
result = Quantity(value, new_unit)
except TypeError:
# list * float fails with TypeError
# Presumably a list type
                # copy the sequence (slice copy; a true deep copy is not needed here)
                value = self._value[:]
# convert tuple to list
try:
value[0] = value[0] # tuple is immutable
except TypeError:
# convert immutable tuple to list
value = []
for i in self._value:
value.append(i)
result = Quantity(self._scale_sequence(value, factor, post_multiply), new_unit)
if (new_unit.is_dimensionless()):
return result._value
else:
return result
def _scale_sequence(self, value, factor, post_multiply):
try:
if post_multiply:
if isinstance(self._value, tuple):
value = tuple([x*factor for x in value])
else:
for i in range(len(value)):
value[i] = value[i]*factor
else:
if isinstance(self._value, tuple):
value = tuple([factor*x for x in value])
else:
for i in range(len(value)):
value[i] = factor*value[i]
except TypeError as ex:
for i in range(len(value)):
value[i] = self._scale_sequence(value[i], factor, post_multiply)
return value
####################################
### Sequence methods of Quantity ###
### in case value is a sequence ###
####################################
def __len__(self):
"""
Return size of internal value type.
"""
return len(self._value)
def __getitem__(self, key):
"""
Keep the same units on contained elements.
"""
assert not is_quantity(self._value[key])
return Quantity(self._value[key], self.unit)
def __setitem__(self, key, value):
        # Delegate slices to one-at-a-time __setitem__ calls
if isinstance(key, slice): # slice
indices = key.indices(len(self))
for i in range(*indices):
self[i] = value[i]
else: # single index
# Check unit compatibility
if self.unit.is_dimensionless() and is_dimensionless(value):
pass # OK
elif not self.unit.is_compatible(value.unit):
raise TypeError('Unit "%s" is not compatible with Unit "%s".' % (self.unit, value.unit))
self._value[key] = value / self.unit
assert not is_quantity(self._value[key])
def __delitem__(self, key):
del(self._value[key])
def __contains__(self, item):
return self._value.__contains__(item.value_in_unit(self.unit))
def __iter__(self):
for item in self._value:
yield Quantity(item, self.unit)
def count(self, item):
return self._value.count(item.value_in_unit(self.unit))
def index(self, item):
return self._value.index(item.value_in_unit(self.unit))
def append(self, item):
if is_quantity(item):
return self._value.append(item.value_in_unit(self.unit))
elif is_dimensionless(self.unit):
return self._value.append(item)
else:
raise TypeError("Cannot append item without units into list with units")
def extend(self, rhs):
self._value.extend(rhs.value_in_unit(self.unit))
def insert(self, index, item):
self._value.insert(index, item.value_in_unit(self.unit))
def remove(self, item):
self._value.remove(item)
def pop(self, *args):
return self._value.pop(*args) * self.unit
# list.reverse will automatically delegate correctly
# list.sort with no arguments will delegate correctly
# list.sort with a comparison function cannot be done correctly
def is_quantity(x):
"""
Returns True if x is a Quantity, False otherwise.
"""
return isinstance(x, Quantity)
def is_dimensionless(x):
"""
"""
if is_unit(x):
return x.is_dimensionless()
elif is_quantity(x):
return x.unit.is_dimensionless()
else:
# everything else in the universe is dimensionless
return True
# Strings can cause trouble
# as can any container that has infinite levels of containment
def _is_string(x):
# step 1) String is always a container
# and its contents are themselves containers.
if isinstance(x, str):
return True
try:
first_item = iter(x).next()
inner_item = iter(first_item).next()
if first_item is inner_item:
return True
else:
return False
except TypeError:
return False
except StopIteration:
return False
# run module directly for testing
if __name__=='__main__':
# Test the examples in the docstrings
import doctest, sys
doctest.testmod(sys.modules[__name__])
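    # Illustrative sketch (added for documentation, not part of the original
    # doctest suite): basic Quantity arithmetic using only dimensionless
    # values, so nothing beyond this module is assumed.
    q = Quantity(3.0)                            # plain scalar becomes a dimensionless Quantity
    assert is_quantity(q) and is_dimensionless(q)
    assert q + Quantity(1.0) == Quantity(4.0)    # addition requires compatible units
    assert q * 2.0 == 6.0                        # dimensionless results collapse to the raw value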
|
lgpl-2.1
| 5,665,115,278,954,907,000 | 38.282486 | 126 | 0.580469 | false | 4.468509 | false | false | false |
diego-d5000/MisValesMd
|
env/lib/python2.7/site-packages/django/db/backends/sqlite3/schema.py
|
1
|
11456
|
import _sqlite3 # isort:skip
import codecs
import copy
from decimal import Decimal
from django.apps.registry import Apps
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.utils import six
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_table = "DROP TABLE %(table)s"
sql_create_inline_fk = "REFERENCES %(to_table)s (%(to_column)s)"
def quote_value(self, value):
try:
value = _sqlite3.adapt(value)
except _sqlite3.ProgrammingError:
pass
# Manual emulation of SQLite parameter quoting
if isinstance(value, type(True)):
return str(int(value))
elif isinstance(value, (Decimal, float)):
return str(value)
elif isinstance(value, six.integer_types):
return str(value)
elif isinstance(value, six.string_types):
return "'%s'" % six.text_type(value).replace("\'", "\'\'")
elif value is None:
return "NULL"
elif isinstance(value, (bytes, bytearray, six.memoryview)):
# Bytes are only allowed for BLOB fields, encoded as string
# literals containing hexadecimal data and preceded by a single "X"
# character:
# value = b'\x01\x02' => value_hex = b'0102' => return X'0102'
value = bytes(value)
hex_encoder = codecs.getencoder('hex_codec')
value_hex, _length = hex_encoder(value)
# Use 'ascii' encoding for b'01' => '01', no need to use force_text here.
return "X'%s'" % value_hex.decode('ascii')
else:
raise ValueError("Cannot quote parameter value %r of type %s" % (value, type(value)))
def _remake_table(self, model, create_fields=[], delete_fields=[], alter_fields=[], override_uniques=None,
override_indexes=None):
"""
Shortcut to transform a model from old_model into new_model
"""
# Work out the new fields dict / mapping
body = {f.name: f for f in model._meta.local_fields}
# Since mapping might mix column names and default values,
# its values must be already quoted.
mapping = {f.column: self.quote_name(f.column) for f in model._meta.local_fields}
# This maps field names (not columns) for things like unique_together
rename_mapping = {}
# If any of the new or altered fields is introducing a new PK,
# remove the old one
restore_pk_field = None
if any(f.primary_key for f in create_fields) or any(n.primary_key for o, n in alter_fields):
for name, field in list(body.items()):
if field.primary_key:
field.primary_key = False
restore_pk_field = field
if field.auto_created:
del body[name]
del mapping[field.column]
# Add in any created fields
for field in create_fields:
body[field.name] = field
# Choose a default and insert it into the copy map
if not field.many_to_many:
mapping[field.column] = self.quote_value(
self.effective_default(field)
)
# Add in any altered fields
for (old_field, new_field) in alter_fields:
body.pop(old_field.name, None)
mapping.pop(old_field.column, None)
body[new_field.name] = new_field
if old_field.null and not new_field.null:
case_sql = "coalesce(%(col)s, %(default)s)" % {
'col': self.quote_name(old_field.column),
'default': self.quote_value(self.effective_default(new_field))
}
mapping[new_field.column] = case_sql
else:
mapping[new_field.column] = self.quote_name(old_field.column)
rename_mapping[old_field.name] = new_field.name
# Remove any deleted fields
for field in delete_fields:
del body[field.name]
del mapping[field.column]
# Remove any implicit M2M tables
if field.many_to_many and field.rel.through._meta.auto_created:
return self.delete_model(field.rel.through)
# Work inside a new app registry
apps = Apps()
# Provide isolated instances of the fields to the new model body
# Instantiating the new model with an alternate db_table will alter
# the internal references of some of the provided fields.
body = copy.deepcopy(body)
# Work out the new value of unique_together, taking renames into
# account
if override_uniques is None:
override_uniques = [
[rename_mapping.get(n, n) for n in unique]
for unique in model._meta.unique_together
]
# Work out the new value for index_together, taking renames into
# account
if override_indexes is None:
override_indexes = [
[rename_mapping.get(n, n) for n in index]
for index in model._meta.index_together
]
# Construct a new model for the new state
meta_contents = {
'app_label': model._meta.app_label,
'db_table': model._meta.db_table + "__new",
'unique_together': override_uniques,
'index_together': override_indexes,
'apps': apps,
}
meta = type("Meta", tuple(), meta_contents)
body['Meta'] = meta
body['__module__'] = model.__module__
temp_model = type(model._meta.object_name, model.__bases__, body)
# Create a new table with that format. We remove things from the
# deferred SQL that match our table name, too
self.deferred_sql = [x for x in self.deferred_sql if model._meta.db_table not in x]
self.create_model(temp_model)
# Copy data from the old table
field_maps = list(mapping.items())
self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % (
self.quote_name(temp_model._meta.db_table),
', '.join(self.quote_name(x) for x, y in field_maps),
', '.join(y for x, y in field_maps),
self.quote_name(model._meta.db_table),
))
# Delete the old table
self.delete_model(model, handle_autom2m=False)
# Rename the new to the old
self.alter_db_table(temp_model, temp_model._meta.db_table, model._meta.db_table)
# Run deferred SQL on correct table
for sql in self.deferred_sql:
self.execute(sql.replace(temp_model._meta.db_table, model._meta.db_table))
self.deferred_sql = []
# Fix any PK-removed field
if restore_pk_field:
restore_pk_field.primary_key = True
def delete_model(self, model, handle_autom2m=True):
if handle_autom2m:
super(DatabaseSchemaEditor, self).delete_model(model)
else:
# Delete the table (and only that)
self.execute(self.sql_delete_table % {
"table": self.quote_name(model._meta.db_table),
})
def add_field(self, model, field):
"""
Creates a field on a model.
Usually involves adding a column, but may involve adding a
table instead (for M2M fields)
"""
# Special-case implicit M2M tables
if field.many_to_many and field.rel.through._meta.auto_created:
return self.create_model(field.rel.through)
self._remake_table(model, create_fields=[field])
def remove_field(self, model, field):
"""
Removes a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# M2M fields are a special case
if field.many_to_many:
# For implicit M2M tables, delete the auto-created table
if field.rel.through._meta.auto_created:
self.delete_model(field.rel.through)
# For explicit "through" M2M fields, do nothing
# For everything else, remake.
else:
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)['type'] is None:
return
self._remake_table(model, delete_fields=[field])
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Actually perform a "physical" (non-ManyToMany) field update."""
# Alter by remaking table
self._remake_table(model, alter_fields=[(old_field, new_field)])
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deals with a model changing its index_together.
Note: The input index_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
self._remake_table(model, override_indexes=new_index_together)
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deals with a model changing its unique_together.
Note: The input unique_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
self._remake_table(model, override_uniques=new_unique_together)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""
Alters M2Ms to repoint their to= endpoints.
"""
if old_field.rel.through._meta.db_table == new_field.rel.through._meta.db_table:
# The field name didn't change, but some options did; we have to propagate this altering.
self._remake_table(
old_field.rel.through,
alter_fields=[(
# We need the field that points to the target model, so we can tell alter_field to change it -
# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
old_field.rel.through._meta.get_field(old_field.m2m_reverse_field_name()),
new_field.rel.through._meta.get_field(new_field.m2m_reverse_field_name()),
)],
override_uniques=(new_field.m2m_field_name(), new_field.m2m_reverse_field_name()),
)
return
# Make a new through table
self.create_model(new_field.rel.through)
# Copy the data across
self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % (
self.quote_name(new_field.rel.through._meta.db_table),
', '.join([
"id",
new_field.m2m_column_name(),
new_field.m2m_reverse_name(),
]),
', '.join([
"id",
old_field.m2m_column_name(),
old_field.m2m_reverse_name(),
]),
self.quote_name(old_field.rel.through._meta.db_table),
))
# Delete the old through table
self.delete_model(old_field.rel.through)
|
mit
| 8,917,223,311,476,454,000 | 43.102362 | 114 | 0.56669 | false | 4.088508 | false | false | false |
AustereCuriosity/astropy
|
astropy/samp/tests/test_hub_proxy.py
|
1
|
1178
|
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from .. import conf
def setup_module(module):
conf.use_internet = False
class TestHubProxy(object):
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmpdir):
lockfile = tmpdir.join('.samptest').realpath().strpath
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
bsd-3-clause
| 5,713,479,540,355,891,000 | 21.653846 | 81 | 0.654499 | false | 3.537538 | true | false | false |
fxia22/pointGAN
|
show_gan_rnn.py
|
1
|
2043
|
from __future__ import print_function
from show3d_balls import *
import argparse
import os
import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torchvision.utils as vutils
from torch.autograd import Variable
from datasets import PartDataset
from pointnet import PointGen, PointGenR
import torch.nn.functional as F
import matplotlib.pyplot as plt
#showpoints(np.random.randn(2500,3), c1 = np.random.uniform(0,1,size = (2500)))
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str, default = '', help='model path')
opt = parser.parse_args()
print (opt)
gen = PointGenR()
gen.load_state_dict(torch.load(opt.model))
#sim_noise = Variable(torch.randn(5, 2, 20))
#
#sim_noises = Variable(torch.zeros(5, 15, 20))
#
#for i in range(15):
# x = i/15.0
# sim_noises[:,i,:] = sim_noise[:,0,:] * x + sim_noise[:,1,:] * (1-x)
#
#points = gen(sim_noises)
#point_np = points.transpose(2,1).data.numpy()
sim_noise = Variable(torch.randn(5, 6, 20))
sim_noises = Variable(torch.zeros(5, 30 * 5,20))
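# Linearly interpolate between consecutive noise anchors: for each of the 5
# segments, blend anchor j into anchor (j+1) % 5 over 30 steps, giving 150
# latent codes that morph smoothly from one generated shape to the next.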
for j in range(5):
for i in range(30):
x = (1-i/30.0)
sim_noises[:,i + 30 * j,:] = sim_noise[:,j,:] * x + sim_noise[:,(j+1) % 5,:] * (1-x)
points = gen(sim_noises)
point_np = points.transpose(2,1).data.numpy()
print(point_np.shape)
for i in range(150):
print(i)
frame = showpoints_frame(point_np[i])
plt.imshow(frame)
plt.axis('off')
plt.savefig('%s/%04d.png' %('out_rgan', i), bbox_inches='tight')
plt.clf()
#showpoints(point_np)
#sim_noise = Variable(torch.randn(5, 1000, 20))
#points = gen(sim_noise)
#point_np = points.transpose(2,1).data.numpy()
#print(point_np.shape)
#choice = np.random.choice(2500, 2048, replace=False)
#print(point_np[:, choice, :].shape)
#showpoints(point_np)
#np.savez('rgan.npz', points = point_np[:, choice, :])
|
mit
| -721,880,346,011,303,300 | 23.035294 | 92 | 0.675967 | false | 2.82964 | false | false | false |
alex-eri/aiohttp-1
|
aiohttp/client_proto.py
|
1
|
6070
|
import asyncio
import asyncio.streams
from .client_exceptions import (ClientOSError, ClientPayloadError,
ClientResponseError, ServerDisconnectedError)
from .http import HttpResponseParser, StreamWriter
from .streams import EMPTY_PAYLOAD, DataQueue
class ResponseHandler(DataQueue, asyncio.streams.FlowControlMixin):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self, *, loop=None, **kwargs):
asyncio.streams.FlowControlMixin.__init__(self, loop=loop)
DataQueue.__init__(self, loop=loop)
self.paused = False
self.transport = None
self.writer = None
self._should_close = False
self._message = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
self._timer = None
self._skip_status = ()
self._tail = b''
self._upgraded = False
self._parser = None
@property
def upgraded(self):
return self._upgraded
@property
def should_close(self):
if (self._payload is not None and
not self._payload.is_eof() or self._upgraded):
return True
return (self._should_close or self._upgraded or
self.exception() is not None or
self._payload_parser is not None or
len(self) or self._tail)
def close(self):
transport = self.transport
if transport is not None:
transport.close()
self.transport = None
return transport
def is_connected(self):
return self.transport is not None
def connection_made(self, transport):
self.transport = transport
self.writer = StreamWriter(self, transport, self._loop)
def connection_lost(self, exc):
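        # Tear down parser state and surface the error: feed EOF to any active
        # payload parser, report incomplete payloads / unexpected disconnects to
        # waiters, then drop references to the transport and writer.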
if self._payload_parser is not None:
try:
self._payload_parser.feed_eof()
except Exception:
pass
try:
self._parser.feed_eof()
except Exception as e:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError('Response payload is not completed'))
if not self.is_eof():
if isinstance(exc, OSError):
exc = ClientOSError(*exc.args)
if exc is None:
exc = ServerDisconnectedError()
DataQueue.set_exception(self, exc)
self.transport = self.writer = None
self._should_close = True
self._parser = None
self._message = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
super().connection_lost(exc)
def eof_received(self):
pass
def pause_reading(self):
if not self._reading_paused:
try:
self.transport.pause_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = True
def resume_reading(self):
if self._reading_paused:
try:
self.transport.resume_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = False
def set_exception(self, exc):
self._should_close = True
super().set_exception(exc)
def set_parser(self, parser, payload):
self._payload = payload
self._payload_parser = parser
if self._tail:
data, self._tail = self._tail, None
self.data_received(data)
def set_response_params(self, *, timer=None,
skip_payload=False,
skip_status_codes=(),
read_until_eof=False):
self._skip_payload = skip_payload
self._skip_status_codes = skip_status_codes
self._read_until_eof = read_until_eof
self._parser = HttpResponseParser(
self, self._loop, timer=timer,
payload_exception=ClientPayloadError,
read_until_eof=read_until_eof)
if self._tail:
data, self._tail = self._tail, b''
self.data_received(data)
def data_received(self, data):
if not data:
return
# custom payload parser
if self._payload_parser is not None:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self._payload = None
self._payload_parser = None
if tail:
self.data_received(tail)
return
else:
if self._upgraded or self._parser is None:
# i.e. websocket connection, websocket parser is not set yet
self._tail += data
else:
# parse http messages
try:
messages, upgraded, tail = self._parser.feed_data(data)
except BaseException as exc:
import traceback
traceback.print_exc()
self._should_close = True
self.set_exception(
ClientResponseError(code=400, message=str(exc)))
self.transport.close()
return
self._upgraded = upgraded
for message, payload in messages:
if message.should_close:
self._should_close = True
self._message = message
self._payload = payload
if (self._skip_payload or
message.code in self._skip_status_codes):
self.feed_data((message, EMPTY_PAYLOAD), 0)
else:
self.feed_data((message, payload), 0)
if upgraded:
self.data_received(tail)
else:
self._tail = tail
|
apache-2.0
| 8,525,949,508,562,163,000 | 30.780105 | 77 | 0.531796 | false | 4.891217 | false | false | false |
martinhbramwell/evalOfFlask
|
frmwk/forms/attic/demo_forms.py
|
1
|
1202
|
from flask.ext.wtf import Form, TextField, BooleanField, TextAreaField
from flask.ext.wtf import Required, Length
from flask.ext.babel import gettext
from frmwk.model.mdUser import User
class EditForm(Form):
nickname = TextField('nickname', validators = [Required()])
about_me = TextAreaField('about_me', validators = [Length(min = 0, max = 140)])
def __init__(self, original_nickname, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.original_nickname = original_nickname
def validate(self):
if not Form.validate(self):
return False
if self.nickname.data == self.original_nickname:
return True
if self.nickname.data != User.make_valid_nickname(self.nickname.data):
self.nickname.errors.append(gettext('This nickname has invalid characters. Please use letters, numbers, dots and underscores only.'))
return False
user = User.query.filter_by(nickname = self.nickname.data).first()
if user != None:
self.nickname.errors.append(gettext('This nickname is already in use. Please choose another one.'))
return False
return True
|
bsd-3-clause
| 4,632,528,655,272,532,000 | 41.928571 | 145 | 0.65807 | false | 4.202797 | false | false | false |
Zerknechterer/pyload
|
module/plugins/crypter/MultiloadCz.py
|
1
|
1754
|
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.Crypter import Crypter
class MultiloadCz(Crypter):
__name__ = "MultiloadCz"
__type__ = "crypter"
__version__ = "0.41"
__pattern__ = r'http://(?:[^/]*\.)?multiload\.cz/(stahnout|slozka)/.+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package" , True),
("usedHoster" , "str" , "Prefered hoster list (bar-separated)", "" ),
("ignoredHoster" , "str" , "Ignored hoster list (bar-separated)" , "" )]
__description__ = """Multiload.cz decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")]
FOLDER_PATTERN = r'<form action="" method="get"><textarea.*?>([^>]*)</textarea></form>'
LINK_PATTERN = r'<p class="manager-server"><strong>([^<]+)</strong></p><p class="manager-linky"><a href="(.+?)">'
def decrypt(self, pyfile):
self.html = self.load(pyfile.url, decode=True)
if re.match(self.__pattern__, pyfile.url).group(1) == "slozka":
m = re.search(self.FOLDER_PATTERN, self.html)
if m:
self.urls.extend(m.group(1).split())
else:
m = re.findall(self.LINK_PATTERN, self.html)
if m:
prefered_set = set(self.getConfig('usedHoster').split('|'))
self.urls.extend(x[1] for x in m if x[0] in prefered_set)
if not self.urls:
ignored_set = set(self.getConfig('ignoredHoster').split('|'))
self.urls.extend(x[1] for x in m if x[0] not in ignored_set)
|
gpl-3.0
| -8,429,412,199,865,437,000 | 40.761905 | 117 | 0.530217 | false | 3.303202 | false | false | false |
openconfig/oc-pyang
|
openconfig_pyang/plugins/util/yangpath.py
|
1
|
1676
|
"""Copyright 2016 The OpenConfig Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Utilities for manipulating YANG paths
"""
import re
def split_paths(path):
"""Return a list of path elements.
Args:
path: A YANG path string specified as /a/b
Returns:
A list of path components
"""
components = path.split("/")
return [c for c in components if c]
def strip_namespace(path):
"""Removes namespace prefixes from elements of the supplied path.
Args:
path: A YANG path string
Returns:
A YANG path string with the namespaces removed.
"""
re_ns = re.compile(r"^.+:")
path_components = [re_ns.sub("", comp) for comp in path.split("/")]
pathstr = "/".join(path_components)
return pathstr
def remove_last(path):
"""Removes the last path element and returns both parts.
Note the last '/' is not returned in either part.
Args:
path: A path string represented as a / separated string
Returns:
A tuple of:
0: the path with the last element removed (string)
1: the name of the last element (string)
"""
components = path.split("/")
last = components.pop()
prefix = "/".join(components)
return (prefix, last)
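

# Minimal usage sketch (added for illustration; the example path below is a
# made-up string, not taken from any OpenConfig model):
if __name__ == "__main__":
    example = "/oc-if:interfaces/oc-if:interface/config/name"
    print(split_paths(example))      # ['oc-if:interfaces', 'oc-if:interface', 'config', 'name']
    print(strip_namespace(example))  # '/interfaces/interface/config/name'
    print(remove_last(example))      # ('/oc-if:interfaces/oc-if:interface/config', 'name')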
|
apache-2.0
| 9,101,595,041,839,160,000 | 24.014925 | 72 | 0.704057 | false | 4.038554 | false | false | false |
rossant/galry
|
examples/mandelbrot.py
|
1
|
1978
|
"""GPU-based interactive Mandelbrot fractal example."""
from galry import *
import numpy as np
import numpy.random as rdn
FSH = """
// take a position and a number of iterations, and
// returns the first iteration where the system escapes a box of size N.
int mandelbrot_escape(vec2 pos, int iterations)
{
vec2 z = vec2(0., 0.);
int n = 0;
int N = 10;
int N2 = N * N;
float r2 = 0.;
for (int i = 0; i < iterations; i++)
{
float zx = z.x * z.x - z.y * z.y + pos.x;
float zy = 2 * z.x * z.y + pos.y;
r2 = zx * zx + zy * zy;
if (r2 > N2)
{
n = i;
break;
}
z = vec2(zx, zy);
}
return n;
}
"""
FS = """
// this vector contains the coordinates of the current pixel
// varying_tex_coords contains a position in [0,1]^2
vec2 pos = vec2(-2.0 + 3. * varying_tex_coords.x,
-1.5 + 3. * varying_tex_coords.y);
// run mandelbrot system
int n = mandelbrot_escape(pos, iterations);
float c = log(float(n)) / log(float(iterations));
// compute the red value as a function of n
out_color = vec4(c, 0., 0., 1.);
"""
def get_iterations(zoom=1):
return int(500 * np.log(1 + zoom))
class MandelbrotVisual(TextureVisual):
def initialize_fragment(self):
self.add_fragment_header(FSH)
self.add_fragment_main(FS)
def base_mandelbrot(self, iterations=None):
if iterations is None:
iterations = get_iterations()
self.add_uniform("iterations", vartype="int", ndim=1, data=iterations)
def initialize(self, *args, **kwargs):
iterations = kwargs.pop('iterations', None)
super(MandelbrotVisual, self).initialize(*args, **kwargs)
self.base_mandelbrot(iterations)
def update(figure, parameter):
zoom = figure.get_processor('navigation').sx
figure.set_data(iterations=get_iterations(zoom))
figure(constrain_ratio=True,
constrain_navigation=True,)
visual(MandelbrotVisual)
# event('Pan', pan)
event('Zoom', update)
show()
|
bsd-3-clause
| 5,936,480,683,851,878,000 | 24.701299 | 78 | 0.627401 | false | 3.033742 | false | false | false |
textioHQ/pattern
|
examples/05-vector/06-svm.py
|
1
|
4083
|
from __future__ import print_function
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
import random
from pattern.db import Datasheet
from pattern_text.nl import tag, predicative
from pattern.vector import SVM, KNN, NB, count, shuffled
# This example demonstrates a Support Vector Machine (SVM).
# SVM is a robust classifier that uses "kernel" functions.
# See: http://www.clips.ua.ac.be/pages/pattern-vector#svm
#
# As a metaphor, imagine the following game:
# - The ground is scattered with red and blue marbles.
# - It is your task to separate them using a single, straight line.
#
# The separation is going to be a rough approximation, obviously.
#
# Now imagine the following game:
# - The room is filled with static, floating red and blue marbles.
# - It is your task to separate them by inserting a glass panel between them.
#
# The 3-D space gives a lot more options. Adding more dimensions add even more options.
# This is roughly what a SVM does, using kernel functions to push the separation
# to a higher dimension.
# Pattern includes precompiled C binaries of libsvm.
# If these do not work on your system you have to compile libsvm manually.
# You can also change the "SVM()" statement below with "KNN()",
# so you can still follow the rest of the example.
classifier = SVM()
# We'll build a classifier to predict sentiment in Dutch movie reviews.
# For example, "geweldige film!" (great movie) indicates a positive sentiment.
# The CSV file at pattern/test/corpora/polarity-nl-bol.com.csv
# contains 1,500 positive and 1,500 negative reviews.
# The pattern.vector module has a shuffled() function
# which we use to randomly arrange the reviews in the list:
print("loading data...")
data = os.path.join(os.path.dirname(__file__), "..", "..",
"test", "corpora", "polarity-nl-bol.com.csv")
data = Datasheet.load(data)
data = shuffled(data)
# We do not necessarily need Document objects as in the previous examples.
# We can train any classifier on simple Python dictionaries too.
# This is sometimes easier if you want full control over the data.
# The instance() function below returns a train/test instance for a given review:
# 1) parse the review for part-of-speech tags,
# 2) keep adjectives, adverbs and exclamation marks (these mainly carry sentiment),
# 3) lemmatize the Dutch adjectives, e.g., "goede" => "goed" (good).
# 4) count the distinct words in the list, map it to a dictionary.
def instance(review): # "Great book!"
# [("Great", "JJ"), ("book", "NN"), ("!", "!")]
v = tag(review)
v = [word for (word, pos) in v if pos in ("JJ", "RB") or word in ("!")]
v = [predicative(word) for word in v] # ["great", "!", "!"]
v = count(v) # {"great": 1, "!": 1}
return v
# We can add any kind of features to a custom instance dict.
# For example, in a deception detection experiment
# we may want to populate the dict with PRP (pronouns), punctuation marks,
# average sentence length, a score for word diversity, etc.
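# A rough sketch of such a richer instance function (illustrative only and not
# used below; the extra feature names are invented for this example):
def richer_instance(review):
    words = [word for (word, pos) in tag(review)]
    v = instance(review)                                  # start from the sentiment features above
    v['_word_count'] = len(words)
    v['_diversity'] = len(set(words)) / float(len(words) or 1)
    v['_exclamations'] = review.count('!')
    return v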
# Use 1,000 random instances as training material.
print("training...")
for score, review in data[:1000]:
classifier.train(instance(review), type=int(score) > 0)
# classifier.save("sentiment-nl-svm.p")
#classifier = SVM.load("sentiment-nl-svm.p")
# Use 500 random instances as test.
print("testing...")
i = n = 0
for score, review in data[1000:1500]:
if classifier.classify(instance(review)) == (int(score) > 0):
i += 1
n += 1
# The overall accuracy is around 82%.
# A Naive Bayes classifier has about 78% accuracy.
# A KNN classifier has about 80% accuracy.
# Careful: to get a reliable score you need to calculate precision and recall,
# study the documentation at:
# http://www.clips.ua.ac.be/pages/pattern-metrics#accuracy
print(float(i) / n)
# The work is not done here.
# Low accuracy is disappointing, but high accuracy is often suspicious.
# Things to look out for:
# - distinction between train and test set,
# - overfitting: http://en.wikipedia.org/wiki/Overfitting
|
bsd-3-clause
| -5,175,451,043,334,958,000 | 38.640777 | 87 | 0.70267 | false | 3.405338 | true | false | false |
Reigel/kansha
|
kansha/authentication/ldap/ldap_auth.py
|
1
|
4186
|
# -*- coding:utf-8 -*-
# --
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
# --
from nagare import log
try:
import ldap
except ImportError:
ldap = None
import sys
import types
def toUTF8(v):
if isinstance(v, unicode):
return v.encode('utf-8')
elif isinstance(v, (types.TupleType, types.ListType)):
return [toUTF8(e) for e in v]
elif isinstance(v, types.DictType):
return dict([(toUTF8(k), toUTF8(v_)) for k, v_ in v.items()])
else:
return v
class LDAPAuth(object):
def __init__(self, ldap_cfg):
ldap_cfg = toUTF8(ldap_cfg)
self.server = "ldap://%s:%s" % (ldap_cfg['host'], ldap_cfg['port'])
self.users_base_dn = ldap_cfg['users_base_dn']
def connect(self):
"""Connect to LDAP server
Return:
- a server connection
"""
assert ldap, 'python_ldap not installed'
return ldap.initialize(self.server)
def get_user_dn(self, uid):
raise NotImplementedError()
def check_password(self, uid, password):
"""Check if the specified couple user/password is correct
In:
- ``uid`` -- the user id
- ``password`` -- the user password
Return:
- True if password is checked
"""
c = self.connect()
dn = self.get_user_dn(uid)
# Try to authenticate
try:
c.simple_bind_s(dn, password.encode('UTF-8'))
return True
except ldap.INVALID_CREDENTIALS:
log.info("Bad credentials for DN %r" % dn)
except ldap.SERVER_DOWN:
log.critical("LDAP server down")
finally:
c.unbind()
def get_profile(self, uid, password):
raise NotImplementedError()
class NngLDAPAuth(LDAPAuth):
def get_user_dn(self, uid):
"""Construct a user DN given an user id
In:
- ``uid`` -- the user id
Return:
- a string, the user DN
"""
return 'uid=%s,%s' % (ldap.dn.escape_dn_chars(toUTF8(uid)), self.users_base_dn)
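        # e.g. get_user_dn('jdoe') -> 'uid=jdoe,ou=people,dc=example,dc=org'
        # (assuming users_base_dn was configured as 'ou=people,dc=example,dc=org';
        #  both values here are made-up examples)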
def get_profile(self, uid, password):
c = self.connect()
ldap_result = c.search_s(self.get_user_dn(uid), ldap.SCOPE_BASE)[0][1]
profile = {}
profile['uid'] = ldap_result['uid'][0]
profile['name'] = ldap_result['displayName'][0].decode('utf-8')
profile['email'] = ldap_result['mail'][0]
profile['picture'] = ldap_result['jpegPhoto'][0] if 'jpegPhoto' in ldap_result else None
return profile
class ADLDAPAuth(LDAPAuth):
def connect(self):
conn = super(ADLDAPAuth, self).connect()
conn.set_option(ldap.OPT_REFERRALS, 0)
conn.protocol_version = 3
return conn
def check_password(self, uid, password):
"""Check if the specified couple user/password is correct
In:
- ``uid`` -- the user id
- ``password`` -- the user password
Return:
- True if password is checked
"""
c = self.connect()
# Try to authenticate
try:
c.simple_bind_s(uid, password)
return True
except ldap.INVALID_CREDENTIALS:
log.info("Bad credentials for uid %r" % uid)
except ldap.SERVER_DOWN:
log.critical("LDAP server down")
finally:
c.unbind()
def get_profile(self, uid, password):
c = self.connect()
c.simple_bind_s(uid, password)
ldap_result = c.search_s(self.users_base_dn, ldap.SCOPE_SUBTREE,
'(userPrincipalName=%s)' % ldap.dn.escape_dn_chars(toUTF8(uid)))[0][1]
profile = {}
profile['uid'] = ldap_result['sAMAccountName'][0]
profile['name'] = ldap_result['displayName'][0].decode('utf-8')
profile['email'] = ldap_result.get('mail', [''])[0]
profile['picture'] = ldap_result['thumbnailPhoto'][0] if 'thumbnailPhoto' in ldap_result else None
c.unbind()
return profile
|
bsd-3-clause
| 3,308,616,112,407,735,300 | 29.779412 | 106 | 0.567845 | false | 3.75763 | false | false | false |
CrowdStrike/kafka-python
|
setup.py
|
1
|
1752
|
import sys
from setuptools import setup, Command
with open('VERSION', 'r') as v:
__version__ = v.read().rstrip()
class Tox(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
@classmethod
def run(cls):
import tox
sys.exit(tox.cmdline([]))
test_require = ['tox', 'mock']
if sys.version_info < (2, 7):
test_require.append('unittest2')
setup(
name="kafka-python",
version=__version__,
tests_require=test_require,
cmdclass={"test": Tox},
packages=[
"kafka",
"kafka.consumer",
"kafka.partitioner",
"kafka.producer",
],
author="David Arthur",
author_email="[email protected]",
url="https://github.com/mumrah/kafka-python",
license="Apache License 2.0",
description="Pure Python client for Apache Kafka",
long_description="""
This module provides low-level protocol support for Apache Kafka as well as
high-level consumer and producer classes. Request batching is supported by the
protocol as well as broker-aware request routing. Gzip and Snappy compression
is also supported for message sets.
""",
keywords="apache kafka",
install_requires=['six'],
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
|
apache-2.0
| 5,926,308,993,090,215,000 | 24.764706 | 78 | 0.627283 | false | 4.112676 | true | false | false |
davy39/eric
|
eric6_trpreviewer.py
|
1
|
2399
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2004 - 2014 Detlev Offenbach <[email protected]>
#
"""
Eric6 TR Previewer.
This is the main Python script that performs the necessary initialization
of the tr previewer and starts the Qt event loop. This is a standalone version
of the integrated tr previewer.
"""
from __future__ import unicode_literals
import Toolbox.PyQt4ImportHook # __IGNORE_WARNING__
try: # Only for Py2
import Utilities.compatibility_fixes # __IGNORE_WARNING__
except (ImportError):
pass
import sys
for arg in sys.argv:
if arg.startswith("--config="):
import Globals
configDir = arg.replace("--config=", "")
Globals.setConfigDir(configDir)
sys.argv.remove(arg)
break
from E5Gui.E5Application import E5Application
from Tools.TRSingleApplication import TRSingleApplicationClient
from Globals import AppInfo
from Toolbox import Startup
def createMainWidget(argv):
"""
Function to create the main widget.
@param argv list of commandline parameters (list of strings)
@return reference to the main widget (QWidget)
"""
from Tools.TRPreviewer import TRPreviewer
if len(argv) > 1:
files = argv[1:]
else:
files = []
previewer = TRPreviewer(files, None, 'TRPreviewer')
return previewer
def main():
"""
Main entry point into the application.
"""
options = [
("--config=configDir",
"use the given directory as the one containing the config files"),
]
appinfo = AppInfo.makeAppInfo(sys.argv,
"Eric6 TR Previewer",
"file",
"TR file previewer",
options)
app = E5Application(sys.argv)
client = TRSingleApplicationClient()
res = client.connect()
if res > 0:
if len(sys.argv) > 1:
client.processArgs(sys.argv[1:])
sys.exit(0)
elif res < 0:
print("eric6_trpreviewer: {0}".format(client.errstr()))
sys.exit(res)
else:
res = Startup.simpleAppStartup(sys.argv,
appinfo,
createMainWidget,
app=app)
sys.exit(res)
if __name__ == '__main__':
main()
|
gpl-3.0
| -8,780,271,109,807,929,000 | 25.076087 | 78 | 0.581909 | false | 4.045531 | true | false | false |
h-friederich/lpm
|
login.py
|
1
|
3179
|
from functools import wraps
from flask import Blueprint, request, redirect, render_template, url_for, g, flash
from flask.ext.login import LoginManager, login_user, logout_user, current_user
from flask_wtf import Form
from wtforms import StringField, PasswordField
import base64
from . import auth
login_manager = LoginManager()
login_manager.login_view = 'login.login'
bp = Blueprint('login', __name__)
def init(app):
login_manager.init_app(app)
auth.init(app)
class LoginForm(Form):
# Note: no input validation, submitted value will be handed in the auth module itself
# otherwise we'd have to fetch the full user list for every login
username = StringField('User')
password = PasswordField('Password')
@bp.route('/login', methods=["GET", "POST"])
def login():
"""
Presents the login page
If login data is POSTed, the credentials are validated and the user logged in if successful
"""
form = LoginForm()
if request.method == 'POST' and form.is_submitted():
usr = auth.auth_user(form.username.data, form.password.data)
if usr and usr.has_role('login') and usr.is_active:
login_user(usr)
return redirect(request.args.get('next') or url_for('items.overview'))
elif usr is None:
flash('invalid credentials', 'error')
elif not usr.is_active:
flash('login expired', 'error')
else:
flash('insufficient permissions', 'error')
return render_template('login.html', form=form)
@bp.route('/logout')
def logout():
"""
Performs a logout on the user
"""
logout_user()
return redirect(url_for('login.login'))
def role_required(roles):
"""
Decorator that ensures the current user has
- one of the specified roles (if a tuple)
- the specified role (otherwise)
"""
def real_role_required(f):
@wraps(f)
def wrapper(*args, **kwargs):
introles = roles
if not isinstance(introles, tuple):
introles = (introles,)
valid = False
if current_user.is_authenticated:
for role in introles:
if current_user.has_role(role):
valid = True
break
if not valid:
flash('insufficient privileges to access this page', 'danger')
return login_manager.unauthorized()
return f(*args, **kwargs)
return wrapper
return real_role_required
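
# Illustrative usage (not part of the original module): protect a view so that
# only users holding one of the listed roles may open it. The role names and
# the view below are hypothetical examples.
@role_required(('admin', 'item_edit'))
def _example_protected_view():
    return render_template('items.html')
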
@login_manager.user_loader
def load_user(username):
"""
Default user loader for the login plugin
"""
return auth.get_user(username)
@login_manager.request_loader
def load_from_request(request):
"""
User loader from basic authorization header
(i.e. for external API)
"""
try:
authinfo = request.headers.get('Authorization', '').replace('Basic ', '', 1)
username, password = base64.b64decode(authinfo).decode('utf-8').split(':')
except:
return None
usr = auth.auth_user(username, password)
if usr and usr.has_role('request_login'):
return usr
return None
|
bsd-3-clause
| 5,078,731,911,012,853,000 | 29 | 95 | 0.621579 | false | 4.101935 | false | false | false |
mortentoo/maya
|
scripts/animvr/export_obj_sequence.py
|
1
|
1035
|
import pymel.core as pm
class ExportObjSequence:
def __init__(self):
self.objects = []
self.start_frame = None
self.end_frame = None
def export(self):
if not pm.ls(sl=True):
pm.warning("No objects selected")
return
path = pm.fileDialog2(fileFilter="*.obj", dialogStyle=2, fileMode=0,
dir=pm.workspace.path)
if not path:
return
path = path[0]
for f in range(self.start_frame, self.end_frame + 1):
frame_path = ('%s_%04d.obj' % (path[:-4], f))
print frame_path
pm.currentTime(f)
pm.exportSelected(frame_path, force=True, options="groups=1;ptgroups=1;materials=0;smoothing=1;normals=1",
typ="OBJexport", preserveReferences=False, exportSelected=True)
if __name__ == '__main__':
eos = ExportObjSequence()
eos.start_frame = 1
eos.end_frame = 72
eos.export()
|
gpl-3.0
| -4,551,239,341,659,585,500 | 27.75 | 118 | 0.523671 | false | 3.833333 | false | false | false |
patrick-winter-knime/mol-struct-nets
|
molstructnets/steps/prediction/tensor2d/tensor_2d.py
|
1
|
3072
|
import h5py
from keras import models
import numpy
from steps.prediction.shared.tensor2d import prediction_array
from util import data_validation, file_structure, progressbar, logger, file_util, hdf5_util, misc
class Tensor2D:
@staticmethod
def get_id():
return 'tensor_2d'
@staticmethod
def get_name():
return 'Network (Grid)'
@staticmethod
def get_parameters():
parameters = list()
parameters.append({'id': 'batch_size', 'name': 'Batch Size', 'type': int, 'default': 100, 'min': 1,
'description': 'Number of data points that will be processed together. A higher number leads'
' to faster processing but needs more memory. Default: 100'})
parameters.append({'id': 'number_predictions', 'name': 'Predictions per data point', 'type': int, 'default': 1,
'min': 1, 'description': 'The number of times a data point is predicted (with different'
' transformations). The result is the mean of all predictions. Default: 1'})
return parameters
@staticmethod
def check_prerequisites(global_parameters, local_parameters):
data_validation.validate_preprocessed_specs(global_parameters)
data_validation.validate_network(global_parameters)
@staticmethod
def execute(global_parameters, local_parameters):
prediction_path = file_structure.get_prediction_file(global_parameters)
if file_util.file_exists(prediction_path):
logger.log('Skipping step: ' + prediction_path + ' already exists')
else:
array = prediction_array.PredictionArrays(global_parameters, local_parameters['batch_size'],
transformations=local_parameters['number_predictions'])
predictions = numpy.zeros((len(array.input), 2))
temp_prediction_path = file_util.get_temporary_file_path('tensor_prediction')
model_path = file_structure.get_network_file(global_parameters)
model = models.load_model(model_path)
logger.log('Predicting data')
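            # Each data point is predicted number_predictions times, each time on a
            # differently transformed input; the summed scores are averaged further down.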
chunks = misc.chunk_by_size(len(array.input), local_parameters['batch_size'])
with progressbar.ProgressBar(len(array.input) * local_parameters['number_predictions']) as progress:
for iteration in range(local_parameters['number_predictions']):
for chunk in chunks:
predictions[chunk['start']:chunk['end']] += model.predict(array.input.next())[:]
progress.increment(chunk['size'])
predictions /= local_parameters['number_predictions']
array.close()
prediction_h5 = h5py.File(temp_prediction_path, 'w')
hdf5_util.create_dataset_from_data(prediction_h5, file_structure.Predictions.prediction, predictions)
prediction_h5.close()
file_util.move_file(temp_prediction_path, prediction_path)
|
gpl-3.0
| -7,248,230,788,257,675,000 | 51.067797 | 128 | 0.623372 | false | 4.517647 | false | false | false |
tomicic/ModelMMORPG
|
sc.py
|
1
|
3434
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
from spade.Agent import BDIAgent
from spade.Behaviour import OneShotBehaviour, EventBehaviour, ACLTemplate, MessageTemplate
from spade.ACLMessage import ACLMessage
from spade.AID import aid
from spade.SWIKB import SWIKB as KB
Overflow = 0.00
'''
TODO:
Reimplement agents and their behaviours as in SSSHS, but add organizational units, see:
https://github.com/javipalanca/spade/blob/master/spade/Organization.py
https://github.com/javipalanca/spade/blob/master/spade/Organization_new.py
Implement communication via messaging.
Store data about agents state in knowledge base.
'''
class Report( OneShotBehaviour ):
''' Reporting behaviour to be added on the fly at the end of simulation with addBehaviour() '''
def _process( self ):
''' Print out the stats of all storages '''
''' TODO: Would be nice to produce some visualization on this '''
with self.myAgent:
totalInterventions = economyRequests + delayRequests + restoreEconomyRequests + advanceRequests + giveRequests + negotiationRequests
global Overflow
for s in storages:
Overflow += s.ResourceLoss
say( ".... [ END OF SIMULATION ] ...." )
say( "******* Number of system interventions: %d" % totalInterventions )
say( "*********** First intervention happened at time: %d" % firstIntervention )
say( "******* Number of LT ALERTS: %d" % LTalerts )
say( "*********** Number of DELAY requests: %d" % delayRequests )
say( "*********** Number of ECONOMY requests: %d" % economyRequests )
say( "*********** Number of NEGOTIATION requests: %d" % negotiationRequests )
say( "******* Number of UT ALERTS: %d" % UTalerts )
say( "*********** Number of RESTORE requests: %d" % restoreEconomyRequests )
say( "*********** Number of ADVANCE requests: %d" % advanceRequests )
say( "*********** Number of GIVE requests: %d" % giveRequests )
say( "*********** Overflow of resources: %f" % Overflow )
for s in storages:
say( "INDIVIDUAL REPORT FOR STORAGE %s" % s.name )
say( "- Capacity: %d" % s.maxCapacity )
say( "- CRL: %d" % s.currentResourceLevel )
say( "- UT alerts: %d" % s.myUTalerts )
say( "- Advance reqs: %d" % s.myAdvanceReqs )
say( "- Resources lost: %f" % s.ResourceLoss )
say( "- LT alerts: %d" % s.myLTalerts )
say( "- Economy reqs: %d" % s.myEconomyReqs )
say( "- Delay reqs: %d" % s.myDelayReqs )
say( "CRL HISTORY: %s" % s.CRLhistory )
say( "OVERFLOW per time unit: %s" % s.overflowHistory )
class TalkingAgent( BDIAgent ):
''' Agent that prints to the console
Abstract - only to be inherited by other agent classes
'''
def say( self, msg ):
''' Say something (e.g. print to console for debug purposes) '''
print '%s: %s' % ( self.name.split( '@' )[ 0 ], str( msg ) )
class Observer( TalkingAgent ):
''' Observer agent -- collects statistical data about all other agents '''
def _setup( self ):
''' Setup the agent's knowledge base '''
self.kb = KB()
self.report = Report()
class Storage( TalkingAgent ):
''' A storage in a settlement '''
def _setup( self ):
pass
class Consumer( TalkingAgent ):
''' A consumer in a settlement '''
def _setup( self ):
pass
class Producer( TalkingAgent ):
''' A producer in a settlement '''
def _setup( self ):
pass
if __name__ == '__main__':
''' Add simulation configuration here (e.g. number of agents, organizational units, hierarchy) '''
pass
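# A hedged sketch of the simulation configuration hinted at above. The JIDs,
# passwords and start() calls are assumptions based on the usual SPADE
# BDIAgent interface, not something this module defines yet.
#
#     storage = Storage('storage1@xmppserver', 'secret')
#     producer = Producer('producer1@xmppserver', 'secret')
#     consumer = Consumer('consumer1@xmppserver', 'secret')
#     observer = Observer('observer@xmppserver', 'secret')
#     for agent in (storage, producer, consumer, observer):
#         agent.start()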
|
gpl-3.0
| 5,696,110,524,052,965,000 | 33.686869 | 135 | 0.655213 | false | 3.124659 | false | false | false |
EnviDat/ckanext-envidat_theme
|
ckanext/envidat_theme/action.py
|
1
|
3448
|
# coding: utf8
from ckan.logic import side_effect_free
from ckan.logic.action.get import user_show
import ckan.plugins.toolkit as toolkit
from ckanext.passwordless import util
import json
from xml.etree import ElementTree
import requests
from logging import getLogger
log = getLogger(__name__)
@side_effect_free
def context_user_show(context, data_dict):
user = envidat_get_user_from_context(context)
if user:
return {'user': user}
else:
return {}
@side_effect_free
def envidat_get_author_data(context, data_dict):
user_data = get_author_data(context, data_dict)
return user_data
def envidat_get_user_from_context(context):
auth_user_obj = context.get('auth_user_obj', None)
if auth_user_obj:
auth_user_obj_dict = auth_user_obj.as_dict()
user_data = user_show(context, {'id': auth_user_obj_dict['id']})
auth_user_obj_dict["email_hash"] = user_data["email_hash"]
# renew the master key
apikey = util.renew_master_token(auth_user_obj_dict['name'])
auth_user_obj_dict["apikey"] = apikey
return auth_user_obj_dict
else:
return {}
def get_author_data(context, data_dict):
context['ignore_auth'] = True
email = data_dict.get('email', '').strip().lower()
if email:
try:
search_results = toolkit.get_action(
'package_search')(
context,
{'q': 'author:\"' + email + '\"',
'sort': 'metadata_modified desc'}
)
except Exception as e:
log.error("exception {0}".format(e))
return {}
author_data = {}
if search_results.get('count', 0) > 0:
author_data_list = []
for dataset in search_results.get('results', []):
author_data_list += [a for a in json.loads(dataset.get('author'))
if a.get('email', '').strip().lower() == email]
# copy dictionary field by field including empty fields
author_data = {}
if author_data_list:
for k, v in author_data_list[0].items():
if v and len(v) > 0:
author_data[k] = "{0}".format(v).strip()
else:
author_data[k] = ""
# fill up empty fields from older datasets
for author in author_data_list:
for k, v in author.items():
if not author_data.get(k) or (len(author_data.get(k, "")) < 1):
# TODO skip affiliation
author_data[k] = "{0}".format(v).strip()
# TODO check if the orcid is empty request from ORCID API
# https://www.envidat.ch/orcid/search/?q=email:*@wsl.ch
if not author_data.get('identifier'):
author_data['identifier'] = get_orcid_id(email)
return author_data
return {}
def get_orcid_id(email):
try:
api_call = 'https://www.envidat.ch/orcid/search/?q=email:{0}'.format(email)
req = requests.get(api_call)
root = ElementTree.fromstring(req.content)
path = root.find(".//{http://www.orcid.org/ns/common}path")
orcid_id = path.text
return orcid_id
except AttributeError:
return ''
except Exception as e:
log.error('Failed to get orcid_id: {0}'.format(e))
return ''
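# A hedged usage sketch: the functions above are meant to be reached through
# CKAN's action API (assuming the extension registers them under their function
# names); the context and email values below are placeholders.
#
#     author = toolkit.get_action('envidat_get_author_data')(
#         {'ignore_auth': True}, {'email': 'jane.doe@example.com'})
#     orcid = get_orcid_id('jane.doe@example.com')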
|
agpl-3.0
| -2,467,828,462,179,743,000 | 27.262295 | 84 | 0.555394 | false | 3.719525 | false | false | false |
rbarlow/pulp_python
|
plugins/test/unit/plugins/importers/test_importer.py
|
1
|
7890
|
"""
Contains tests for pulp_python.plugins.importers.importer.
"""
from gettext import gettext as _
import unittest
import mock
from pulp_python.common import constants
from pulp_python.plugins import models
from pulp_python.plugins.importers import importer
class TestEntryPoint(unittest.TestCase):
"""
Tests for the entry_point() function.
"""
def test_return_value(self):
"""
Assert the correct return value for the entry_point() function.
"""
return_value = importer.entry_point()
expected_value = (importer.PythonImporter, {})
self.assertEqual(return_value, expected_value)
class TestPythonImporter(unittest.TestCase):
"""
This class contains tests for the PythonImporter class.
"""
@mock.patch('pulp.server.controllers.repository.get_unit_model_querysets', spec_set=True)
@mock.patch('pulp.server.controllers.repository.associate_single_unit', spec_set=True)
def test_import_units_units_none(self, mock_associate, mock_get):
"""
Assert correct behavior when units == None.
"""
python_importer = importer.PythonImporter()
dest_repo = mock.MagicMock()
source_repo = mock.MagicMock()
units = ['unit_a', 'unit_b', 'unit_3']
mock_get.return_value = [units]
imported_units = python_importer.import_units(source_repo, dest_repo, mock.MagicMock(),
mock.MagicMock(), units=None)
mock_get.assert_called_once_with(source_repo.repo_obj.repo_id, models.Package)
# Assert that the units were associated correctly
associate_unit_call_args = [c[1] for c in mock_associate.mock_calls]
self.assertEqual(associate_unit_call_args, [(dest_repo.repo_obj, u) for u in units])
# Assert that the units were returned
self.assertEqual(imported_units, units)
@mock.patch('pulp.server.controllers.repository.associate_single_unit', spec_set=True)
def test_import_units_units_not_none(self, mock_associate):
"""
Assert correct behavior when units != None.
"""
python_importer = importer.PythonImporter()
dest_repo = mock.MagicMock()
units = ['unit_a', 'unit_b', 'unit_3']
imported_units = python_importer.import_units(mock.MagicMock(), dest_repo, mock.MagicMock(),
mock.MagicMock(), units=units)
# Assert that the units were associated correctly
associate_unit_call_args = [c[1] for c in mock_associate.mock_calls]
self.assertEqual(associate_unit_call_args, [(dest_repo.repo_obj, u) for u in units])
# Assert that the units were returned
self.assertEqual(imported_units, units)
def test_metadata(self):
"""
Test the metadata class method's return value.
"""
metadata = importer.PythonImporter.metadata()
expected_value = {
'id': constants.IMPORTER_TYPE_ID, 'display_name': _('Python Importer'),
'types': [constants.PACKAGE_TYPE_ID]}
self.assertEqual(metadata, expected_value)
@mock.patch('pulp_python.plugins.importers.importer.shutil.rmtree')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.__init__')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.sync')
@mock.patch('pulp_python.plugins.importers.importer.tempfile.mkdtemp')
def test_sync_repo_failure(self, mkdtemp, sync, __init__, rmtree):
"""
Test the sync_repo() method when the sync fails.
"""
config = mock.MagicMock()
python_importer = importer.PythonImporter()
repo = mock.MagicMock()
sync_conduit = mock.MagicMock()
# Fake the sync raising some bogus error
sync.side_effect = IOError('I/O error, lol!')
__init__.return_value = None
try:
python_importer.sync_repo(repo, sync_conduit, config)
except IOError as e:
# Make sure the error was passed on as it should have been
self.assertEqual(str(e), 'I/O error, lol!')
# A temporary working dir should have been created in the repo's working dir
mkdtemp.assert_called_once_with(dir=repo.working_dir)
# No matter what happens, it's important that we cleaned up the temporary dir
rmtree.assert_called_once_with(mkdtemp.return_value, ignore_errors=True)
# Make sure the SyncStep was initialized correctly
__init__.assert_called_once_with(repo=repo, conduit=sync_conduit, config=config,
working_dir=mkdtemp.return_value)
# Make sure all the right args were passed on to sync()
sync.assert_called_once_with()
@mock.patch('pulp_python.plugins.importers.importer.shutil.rmtree')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.__init__')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.sync')
@mock.patch('pulp_python.plugins.importers.importer.tempfile.mkdtemp')
def test_sync_repo_success(self, mkdtemp, sync, __init__, rmtree):
"""
Test the sync_repo() method when the sync is successful.
"""
config = mock.MagicMock()
python_importer = importer.PythonImporter()
repo = mock.MagicMock()
sync_conduit = mock.MagicMock()
sync_report = mock.MagicMock()
sync.return_value = sync_report
__init__.return_value = None
return_value = python_importer.sync_repo(repo, sync_conduit, config)
# A temporary working dir should have been created in the repo's working dir
mkdtemp.assert_called_once_with(dir=repo.working_dir)
# No matter what happens, it's important that we cleaned up the temporary dir
rmtree.assert_called_once_with(mkdtemp.return_value, ignore_errors=True)
# Make sure the SyncStep was initialized correctly
__init__.assert_called_once_with(repo=repo, conduit=sync_conduit, config=config,
working_dir=mkdtemp.return_value)
# Make sure all the right args were passed on to sync()
sync.assert_called_once_with()
# And, of course, assert that the sync report was returned
self.assertEqual(return_value, sync_report)
@mock.patch('pulp.server.controllers.repository.rebuild_content_unit_counts', spec_set=True)
@mock.patch('pulp.server.controllers.repository.associate_single_unit', spec_set=True)
@mock.patch('pulp_python.plugins.models.Package.from_archive')
def test_upload_unit(self, from_archive, mock_associate, mock_rebuild):
"""
Assert correct operation of upload_unit().
"""
package = from_archive.return_value
python_importer = importer.PythonImporter()
repo = mock.MagicMock()
type_id = constants.PACKAGE_TYPE_ID
unit_key = {}
metadata = {}
file_path = '/some/path/1234'
conduit = mock.MagicMock()
config = {}
report = python_importer.upload_unit(repo, type_id, unit_key, metadata, file_path, conduit,
config)
self.assertEqual(report, {'success_flag': True, 'summary': {}, 'details': {}})
from_archive.assert_called_once_with(file_path)
package.save_and_import_content.assert_called_once_with(file_path)
mock_associate.assert_called_once_with(repo.repo_obj, package)
def test_validate_config(self):
"""
There is no config, so we'll just assert that validation passes.
"""
python_importer = importer.PythonImporter()
return_value = python_importer.validate_config(mock.MagicMock(), {})
expected_value = (True, '')
self.assertEqual(return_value, expected_value)
|
gpl-2.0
| -8,726,490,154,107,941,000 | 42.833333 | 100 | 0.641825 | false | 4.00711 | true | false | false |
Aiacos/DevPyLib
|
mayaLib/guiLib/base/baseUI.py
|
1
|
6497
|
__author__ = 'Lorenzo Argentieri'
import inspect
import ast
import mayaLib.pipelineLib.utility.docs as doc
from PySide2 import QtCore, QtWidgets
import pymel.core as pm
def test(a, b, c, d='ciao', e='stronzo', f=1):
"""
Test Function
:param a:
:param b:
:param c:
:param d:
:param e:
:param f:
:return:
"""
print a, b, c, d, e, f
class Prova():
def __init__(self, ciccia, pupu=2048):
print 'This is a test'
def motodo(self):
print 'test method'
class FunctionUI(QtWidgets.QWidget):
def __init__(self, func, parent=None):
super(FunctionUI, self).__init__(parent)
self.function = func
if inspect.isclass(func):
self.sig = inspect.getargspec(func.__init__)
else:
self.sig = inspect.getargspec(func)
self.layout = QtWidgets.QGridLayout()
self.args = self.getParameterList()
self.label_list = []
self.lineedit_list = []
self.fillButton_list = []
row = 0
for arg in self.args:
if arg[0] != 'self':
labelname = QtWidgets.QLabel(arg[0])
if arg[1] != None:
if isinstance(arg[1], bool):
lineedit = QtWidgets.QCheckBox('')
lineedit.setChecked(arg[1])
else:
lineedit = QtWidgets.QLineEdit(str(arg[1]))
fillButton = QtWidgets.QPushButton(">")
else:
lineedit = QtWidgets.QLineEdit("")
fillButton = QtWidgets.QPushButton(">")
self.layout.addWidget(labelname, row, 0)
self.label_list.append(labelname)
if fillButton:
self.layout.addWidget(fillButton, row, 1)
self.fillButton_list.append(fillButton)
self.layout.addWidget(lineedit, row, 2)
self.lineedit_list.append(lineedit)
row = row + 1
self.execButton = QtWidgets.QPushButton("Execute")
self.advancedCheckBox = QtWidgets.QCheckBox("Advanced")
self.advancedCheckBox.setChecked(False)
self.toggleDefaultParameter(False)
self.layout.addWidget(self.execButton, row, 2)
self.layout.addWidget(self.advancedCheckBox, row, 0)
self.doclabel = QtWidgets.QLabel(doc.getDocs(func))
self.layout.addWidget(self.doclabel, row + 1, 2)
self.setLayout(self.layout)
# self.connect(self.execButton, QtCore.Signal("clicked()"), self.execFunction) # Deprecated
self.execButton.clicked.connect(self.execFunction)
self.advancedCheckBox.stateChanged.connect(self.toggleDefaultParameter)
for button in self.fillButton_list:
button.clicked.connect(self.fillWithSelected)
self.setWindowTitle(func.__name__)
self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
self.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
self.setFocus()
def fillWithSelected(self):
button = self.sender()
selection_list = pm.ls(sl=True)
index = self.fillButton_list.index(button)
lineedit = self.lineedit_list[index]
text_list = []
for item in selection_list:
text_list.append(str(item))
lineedit.setText(', '.join(text_list))
def getParameterList(self):
args = self.sig.args
if len(args) == 0:
return []
varargs = self.sig.varargs
keywords = self.sig.keywords
defaults = self.sig.defaults
if not defaults:
defaults = []
argspairs = []
argslen = len(args)
deflen = len(defaults)
counter = 0
defcount = 0
for arg in args:
if counter < (argslen - deflen):
defval = None
else:
defval = defaults[defcount]
defcount = defcount + 1
counter = counter + 1
pair = [arg, defval]
argspairs.append(pair)
return argspairs
# SLOTS
def toggleDefaultParameter(self, defaultvisible=False):
counter = 0
for arg in self.args:
if arg[0] != 'self':
if defaultvisible:
# show
if arg[1] != None:
self.label_list[counter].show()
self.lineedit_list[counter].show()
self.fillButton_list[counter].show()
else:
# hide
if arg[1] != None:
self.label_list[counter].hide()
self.lineedit_list[counter].hide()
self.fillButton_list[counter].hide()
counter = counter + 1
def execFunction(self):
param_list = []
for param in self.lineedit_list:
value = param.text()
if isinstance(param, QtWidgets.QCheckBox):
if param.isChecked():
qCheckBoxValue = True
else:
qCheckBoxValue = False
value = qCheckBoxValue
param_list.append(value)
elif '[' in value and ']' in value:
value = value.replace('[', '').replace(']', '').replace("'", "").replace(' ', '').split(',')
param_list.append(value)
elif value.replace('.', '', 1).isdigit():
value = ast.literal_eval(value)
param_list.append(value)
elif value == 'True':
value = True
param_list.append(value)
elif value == 'False':
value = False
param_list.append(value)
elif value == '':
value = None
param_list.append(value)
elif ', ' in value:
value = value.split(', ')
param_list.append(value)
else:
param_list.append(value)
self.wrapper(param_list)
def wrapper(self, args):
self.function(*args)
if __name__ == "__main__":
# app = QtWidgets.QApplication.instance()
# button = QtWidgets.QPushButton("Hello World")
# button.show()
# app.exec_()
#print inspect.getargspec(Prova)
t = FunctionUI(Prova)
t.show()
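# A hedged worked example of FunctionUI.getParameterList() for the module-level
# test() function above, assuming it runs inside Maya where a QApplication
# already exists: arguments without defaults are paired with None.
#
#     ui = FunctionUI(test)
#     ui.getParameterList()
#     # -> [['a', None], ['b', None], ['c', None],
#     #     ['d', 'ciao'], ['e', 'stronzo'], ['f', 1]]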
|
agpl-3.0
| -574,755,296,216,365,000 | 29.502347 | 108 | 0.52732 | false | 4.3055 | false | false | false |
marco-lancini/Showcase
|
app_socialnetworks/tumblr.py
|
1
|
7041
|
from __init__ import *
from oauthclient import *
class TumblrClient(OauthClient):
"""
Wrapper for Tumblr APIs
:CONSUMER_KEY: Tumblr App ID
:CONSUMER_SECRET: Tumblr API Secret
:blog: the connected Tumblr blog, if any
:user_auth: account of the user on Showcase
:auth: boolean flag (if True, the operation needs to be authenticated)
.. seealso:: :class:`app_socialnetworks.oauthclient.OauthClient`
"""
CONSUMER_KEY = setting('TUMBLR_CONSUMER_KEY')
CONSUMER_SECRET = setting('TUMBLR_CONSUMER_SECRET')
request_token_url = 'http://www.tumblr.com/oauth/request_token'
authorize_url = 'http://www.tumblr.com/oauth/authorize'
access_token_url = 'http://www.tumblr.com/oauth/access_token'
def __init__(self, blog, user_auth=False, auth=False):
"""
Instantiate the client: if authentication is needed, proceed with Oauth; otherwise, use a simple HTTP client
:param blog: the connected Tumblr blog, if any
:type blog: string
:param user_auth: account of the user on Showcase
:type user_auth: `User`
:param auth: flag (if True, the operation needs to be authenticated)
:type auth: boolean
"""
self.blog = blog
self.user_auth = user_auth
self.auth = auth
if self.auth:
# Authentication needed, proceed with Oauth
super(TumblrClient, self).__init__(self.CONSUMER_KEY, self.CONSUMER_SECRET)
else:
# Use a simple HTTP client
self.client = httplib2.Http()
def request_token(self, consumer):
"""
Retrieve the access token of the user from his connected accounts data
"""
# Retrieve connected accounts
connected_accounts = self.user_auth.social_auth.filter(user=self.user_auth.id).filter(provider="tumblr")
if len(connected_accounts) == 0:
raise NotConnectedException('Not Connected to Tumblr')
# Retrieve access_token from socialauth
access_token = connected_accounts[0].extra_data['access_token']
access_token = urlparse.parse_qs(access_token)
oauth_token = access_token['oauth_token'][0]
oauth_token_secret = access_token['oauth_token_secret'][0]
return oauth_token, oauth_token_secret
#=========================================================================
# READ
#=========================================================================
def _query(self, method, optionals=None):
"""
Execute a read-only query
"""
url = "http://api.tumblr.com/v2/blog/%s.tumblr.com/%s?api_key=%s" % (self.blog, method, self.CONSUMER_KEY)
if optionals:
url += optionals
try:
resp, content = self.client.request(url, "GET")
content = json.loads(content)['response']
return content
except:
return None
def get_blog_info(self):
"""
Get general infos about the connected blog
"""
method = "info"
return self._query(method)
def get_blog_posts(self):
"""
Fetch last 5 blog posts
"""
method = "posts"
optionals = "&limit=5"
posts = self._query(method, optionals)
if posts:
posts = posts['posts']
for p in posts:
temp = datetime.strptime(p['date'], "%Y-%m-%d %H:%M:%S GMT")
p['date'] = temp.strftime("%d %B %Y")
return posts
else:
return None
#=========================================================================
# WRITE
#=========================================================================
def _post_blog(self, params, media=None):
"""
Execute a write query
"""
url = 'http://api.tumblr.com/v2/blog/%s.tumblr.com/post' % self.blog
if media:
content = self._postOAuth(url, params)
content = content.read()
else:
body = urllib.urlencode(params)
resp, content = self.client.request(url, "POST", body=body)
# Check response
content = json.loads(content)
response = content['meta']['msg']
if response:
if response != 'Created':
if response == 'Not Authorized':
raise ClearanceException("Not an owned blog")
else:
raise UploadException("Error During Upload: %s" % response)
else:
raise UploadException("Error During Upload: %s" % response)
def add_text(self, title, body):
"""
Add a blog of type: *text*
:param title: title of the blog post
:type title: string
:param body: content of the blog post
:type body: string
"""
params = {'type': 'text', 'title': title, 'body': body}
return self._post_blog(params)
def add_link(self, title, url):
"""
Add a blog of type: *link*
:param title: title of the blog post
:type title: string
:param url: url of the link to publish
:type url: string
"""
params = {'type': 'link', 'title': title, 'url': url}
return self._post_blog(params)
def add_quote(self, quote):
"""
Add a blog of type: *quote*
:param quote: quote to publish
:type quote: string
"""
params = {'type': 'quote', 'quote': quote}
return self._post_blog(params)
def add_chat(self, title, conversation):
"""
Add a blog of type: *chat*
:param title: title of the blog post
:type title: string
:param conversation: conversation to publish
:type conversation: string
"""
params = {'type': 'chat', 'title': title, 'conversation': conversation}
return self._post_blog(params)
def add_photo(self, source, photo):
"""
Add a blog of type: *photo*
:param source: url of the photo to publish, if any
:type source: string
:param photo: photo to upload, if any
:type photo: image file
"""
if source:
params = {'type': 'photo', 'source': source}
return self._post_blog(params)
elif photo:
params = {'type': 'photo', 'data[0]': photo.read()}
return self._post_blog(params, media=True)
def add_audio(self, source):
"""
Add a blog of type: *audio*
:param source: url of the audio file to publish
:type source: string
"""
if source:
params = {'type': 'audio', 'external_url': source}
return self._post_blog(params)
# def add_video(self, video):
# params = {'type': 'video', 'data[0]': video.read()}
# return self._post_blog(params, media=True)
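if __name__ == '__main__':
    # Hedged usage sketch: the blog name is a placeholder and the read-only
    # calls below go out to the Tumblr API, so network access and a valid
    # TUMBLR_CONSUMER_KEY setting are assumed. auth=False keeps everything on
    # the plain HTTP client, so no Oauth handshake is required here.
    client = TumblrClient('staff', auth=False)
    print client.get_blog_info()
    print client.get_blog_posts()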
|
mit
| -552,312,351,703,994,600 | 30.017621 | 115 | 0.530464 | false | 4.103147 | false | false | false |
crowsonkb/style_transfer
|
log_utils.py
|
1
|
3838
|
import logging
import os
import sys
try:
import curses
except ImportError:
curses = None
def _stderr_supports_color():
color = False
if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
try:
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except Exception:
pass
return color
class LogFormatter(logging.Formatter):
"""Log formatter originally from Tornado and modified."""
DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(process)d]%(end_color)s %(message)s'
DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S'
DEFAULT_COLORS = {
logging.DEBUG: 4, # Blue
logging.INFO: 2, # Green
logging.WARNING: 3, # Yellow
logging.ERROR: 1, # Red
}
def __init__(self, color=True, fmt=DEFAULT_FORMAT, datefmt=DEFAULT_DATE_FORMAT,
colors=DEFAULT_COLORS, precision=3):
r"""
:arg bool color: Enables color support.
:arg string fmt: Log message format.
It will be applied to the attributes dict of log records. The
text between ``%(color)s`` and ``%(end_color)s`` will be colored
depending on the level if color support is on.
:arg dict colors: color mappings from logging level to terminal color
code
:arg string datefmt: Datetime format.
Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
.. versionchanged:: 3.2
Added ``fmt`` and ``datefmt`` arguments.
"""
super().__init__()
self.default_time_format = datefmt
self.precision = precision
self.default_msec_format = ''
self._fmt = fmt
self._colors = {}
if color and _stderr_supports_color():
fg_color = (curses.tigetstr('setaf') or
curses.tigetstr('setf') or '')
for levelno, code in colors.items():
self._colors[levelno] = curses.tparm(fg_color, code).decode()
self._normal = curses.tigetstr('sgr0').decode()
else:
self._normal = ''
def format(self, record):
record.message = record.getMessage()
record.asctime = self.formatTime(record)
if record.levelno in self._colors:
record.color = self._colors[record.levelno]
record.end_color = self._normal
else:
record.color = record.end_color = ''
formatted = self._fmt % record.__dict__
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
lines = [formatted.rstrip()]
lines.extend(ln for ln in record.exc_text.split('\n'))
formatted = '\n'.join(lines)
return formatted.replace('\n', '\n ')
def formatTime(self, record, datefmt=None):
if not datefmt:
datefmt = self.default_time_format
fmttime = super().formatTime(record, datefmt)
if self.precision >= 4:
return '%s.%06d' % (fmttime, record.msecs*1000)
if self.precision >= 1:
return '%s.%03d' % (fmttime, record.msecs)
return fmttime
def setup_logger(name=None, level=None, formatter_opts=None):
"""Sets up pretty logging using LogFormatter."""
if formatter_opts is None:
formatter_opts = {}
logging.captureWarnings(True)
logger = logging.getLogger(name)
if 'DEBUG' in os.environ:
level = logging.DEBUG
elif level is None:
level = logging.INFO
logger.setLevel(level)
channel = logging.StreamHandler()
formatter = LogFormatter(**formatter_opts)
channel.setFormatter(formatter)
logger.addHandler(channel)
return logger
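if __name__ == '__main__':
    # Minimal usage sketch: build the pretty logger for this module and emit
    # one record per level so the per-level colours can be checked by eye.
    log = setup_logger(__name__, formatter_opts={'precision': 3})
    log.debug('debug message (hidden unless the DEBUG env var is set)')
    log.info('info message')
    log.warning('warning message')
    log.error('error message')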
|
mit
| 4,683,264,071,271,343,000 | 32.964602 | 99 | 0.585201 | false | 4.052798 | false | false | false |
itu-oss-project-team/oss-github-analysis-project
|
github_analysis_tool/analyzer/commit_based_analyzer.py
|
1
|
2535
|
import collections
from github_analysis_tool.analyzer.abstract_analyzer import AbstractAnalyzer
from github_analysis_tool.services.db_column_constants import Columns
class CommitBasedAnalyzer(AbstractAnalyzer):
def __init__(self):
AbstractAnalyzer.__init__(self, "commit")
def create_matrix(self, repo_id):
commit_matrix = collections.OrderedDict()  # {<commit1>: {<commit2>: <shared_file_changes>}}
commit_file_counts = collections.OrderedDict() # {<commit>:<file_count>}
repo_files = self._databaseService.get_files_of_repo(repo_id, get_only_file_paths=True)
# For every file in repo
for file_name in repo_files:
commits_of_file = self._databaseService.get_commits_of_file(repo_id, file_name, get_only_ids=True)
for commit in commits_of_file:
# Count how many files are there in each commit so we can normalize our matrix later with these counts
self.__increment_commit_file_count(commit_file_counts, commit)
for commit_1 in commits_of_file:
for commit_2 in commits_of_file:
# For every commit pair that edits this same file
self.__increment_file_count(commit_matrix, commit_1, commit_2)
self.__normalize_matrix(commit_matrix, commit_file_counts)
return commit_matrix
def __increment_file_count(self, commit_matrix, commit_1, commit_2):
if commit_1 == commit_2:
return
if commit_1 not in commit_matrix:
commit_matrix[commit_1] = {}
if commit_2 in commit_matrix[commit_1]:
commit_matrix[commit_1][commit_2] += 1
else:
commit_matrix[commit_1][commit_2] = 1
def __increment_commit_file_count(self, commit_file_counts, commit):
if commit not in commit_file_counts:
commit_file_counts[commit] = 1
else:
commit_file_counts[commit] += 1
def __normalize_matrix(self, commit_matrix, commit_file_counts):
for commit_1 in commit_matrix.keys():
for commit_2 in commit_matrix.keys():
if commit_2 not in commit_matrix[commit_1]:
continue
intersectCount = commit_matrix[commit_1][commit_2]
unionCount = commit_file_counts[commit_1] + commit_file_counts[commit_2] - intersectCount
commit_matrix[commit_1][commit_2] = intersectCount / unionCount
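# A worked example of the Jaccard-style normalisation above: if commits c1 and
# c2 touch 3 files in common, and edit 5 and 4 files in total respectively,
# the matrix entry becomes
#
#     intersect = 3
#     union = 5 + 4 - intersect               # = 6
#     similarity = intersect / float(union)   # = 0.5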
|
mit
| -7,743,140,120,545,697,000 | 43.473684 | 118 | 0.624852 | false | 3.998423 | false | false | false |
DiamondLightSource/diffcalc
|
diffcalc/gdasupport/you.py
|
1
|
4367
|
from diffcalc.gdasupport.scannable.diffractometer import DiffractometerScannableGroup
from diffcalc.gdasupport.scannable.hkl import Hkl
from diffcalc.gdasupport.scannable.hkloffset import HklOffset
from diffcalc.gdasupport.scannable.simulation import SimulatedCrystalCounter
from diffcalc.gdasupport.scannable.wavelength import Wavelength
from diffcalc.gdasupport.scannable.parameter import DiffractionCalculatorParameter
from diffcalc.dc import dcyou as _dc
from diffcalc.dc.help import format_command_help
from diffcalc.gdasupport.scannable.sr2 import Sr2
from diffcalc.gdasupport.scannable.qtrans import Qtrans
reload(_dc)
from diffcalc.dc.dcyou import * # @UnusedWildImport
from diffcalc import settings
try:
import gda # @UnusedImport @UnresolvedImport
GDA = True
except:
GDA = False
if not GDA:
from diffcalc.gdasupport.minigda import command
_pos = command.Pos()
_scan = command.Scan(command.ScanDataPrinter())
def pos(*args):
"""
pos                  show position of all Scannables
pos scn              show position of scn
pos scn target       move scn to target (a number)
"""
return _pos(*args)
def scan(*args):
"""
scan scn start stop step {scn {target}} {det t}
"""
return _scan(*args)
from diffcalc.gdasupport.scannable.sim import sim # @UnusedImport
_scn_group = settings.axes_scannable_group
_diff_scn_name = settings.geometry.name # @UndefinedVariable
_energy_scannable = settings.energy_scannable
# Create diffractometer scannable
_diff_scn = DiffractometerScannableGroup(_diff_scn_name, _dc, _scn_group)
globals()[_diff_scn_name] = _diff_scn
# Create hkl scannables
hkl = Hkl('hkl', _scn_group, _dc)
h = hkl.h
k = hkl.k
l = hkl.l
hkloffset = HklOffset('hkloffset', _scn_group, _dc)
h_offset = hkloffset.h
k_offset = hkloffset.k
l_offset = hkloffset.l
pol_offset = hkloffset.polar
az_offset = hkloffset.azimuthal
sr2 = Sr2('sr2', _scn_group, _dc)
qtrans = Qtrans('qtrans', _scn_group, _dc)
Hkl.dynamic_docstring = format_command_help(hkl_commands_for_help) # must be on the class
ub.__doc__ = format_command_help(ub_commands_for_help)
if settings.include_reference:
_virtual_angles = ('theta', 'ttheta', 'qaz', 'alpha', 'naz', 'tau', 'psi', 'beta', 'betain', 'betaout')
else:
_virtual_angles = ('theta', 'ttheta', 'qaz', 'betain', 'betaout')
hklverbose = Hkl('hklverbose', _scn_group, _dc, _virtual_angles)
# Create wavelength scannable
wl = Wavelength(
'wl', _energy_scannable, settings.energy_scannable_multiplier_to_get_KeV)
if not GDA:
wl.asynchronousMoveTo(1) # Angstrom
_energy_scannable.level = 3
wl.level = 3
# Create simulated counter timer
ct = SimulatedCrystalCounter('ct', _scn_group, settings.geometry, wl)
ct.level = 10
# Create constraint scannables
def _create_constraint_scannable(con_name, scn_name=None):
if not scn_name:
scn_name = con_name
return DiffractionCalculatorParameter(
scn_name, con_name, _dc.constraint_manager)
# Detector constraints
def isconstrainable(name):
return not constraint_manager.is_constraint_fixed(name)
if isconstrainable('delta'): delta_con = _create_constraint_scannable('delta', 'delta_con')
if isconstrainable('gam'): gam_con = _create_constraint_scannable('gam', 'gam_con')
if isconstrainable('qaz'): qaz = _create_constraint_scannable('qaz')
if isconstrainable('naz'): naz = _create_constraint_scannable('naz')
# Reference constraints
if settings.include_reference:
alpha = _create_constraint_scannable('alpha')
beta = _create_constraint_scannable('beta')
psi = _create_constraint_scannable('psi')
a_eq_b = 'a_eq_b'
betain = _create_constraint_scannable('betain')
betaout = _create_constraint_scannable('betaout')
bin_eq_bout = 'bin_eq_bout'
# Sample constraints
if isconstrainable('mu'): mu_con = _create_constraint_scannable('mu', 'mu_con')
if isconstrainable('eta'): eta_con = _create_constraint_scannable('eta', 'eta_con')
if isconstrainable('chi'): chi_con = _create_constraint_scannable('chi', 'chi_con')
if isconstrainable('phi'): phi_con = _create_constraint_scannable('phi', 'phi_con')
if isconstrainable('mu') and isconstrainable('gam'): mu_is_gam = 'mu_is_gam'
omega = _create_constraint_scannable('omega')
bisect = 'bisect'
# Cleanup other cruft
del format_command_help
|
gpl-3.0
| -3,793,663,831,858,863,000 | 32.083333 | 107 | 0.715365 | false | 2.950676 | false | false | false |
kubeflow/tf-operator
|
sdk/python/kubeflow/tfjob/models/v1_tf_job.py
|
1
|
7910
|
# Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
tfjob
Python SDK for TF-Operator # noqa: E501
OpenAPI spec version: v0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client import V1ObjectMeta # noqa: F401,E501
from kubeflow.tfjob.models.v1_job_status import V1JobStatus # noqa: F401,E501
from kubeflow.tfjob.models.v1_tf_job_spec import V1TFJobSpec # noqa: F401,E501
class V1TFJob(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1TFJobSpec',
'status': 'V1JobStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None): # noqa: E501
"""V1TFJob - a model defined in Swagger""" # noqa: E501
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
if status is not None:
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1TFJob. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1TFJob. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1TFJob.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1TFJob. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1TFJob. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1TFJob. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1TFJob.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1TFJob. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1TFJob. # noqa: E501
Standard Kubernetes object's metadata. # noqa: E501
:return: The metadata of this V1TFJob. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1TFJob.
Standard Kubernetes object's metadata. # noqa: E501
:param metadata: The metadata of this V1TFJob. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1TFJob. # noqa: E501
Specification of the desired state of the TFJob. # noqa: E501
:return: The spec of this V1TFJob. # noqa: E501
:rtype: V1TFJobSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1TFJob.
Specification of the desired state of the TFJob. # noqa: E501
:param spec: The spec of this V1TFJob. # noqa: E501
:type: V1TFJobSpec
"""
self._spec = spec
@property
def status(self):
"""Gets the status of this V1TFJob. # noqa: E501
Most recently observed status of the TFJob. Read-only (modified by the system). # noqa: E501
:return: The status of this V1TFJob. # noqa: E501
:rtype: V1JobStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1TFJob.
Most recently observed status of the TFJob. Read-only (modified by the system). # noqa: E501
:param status: The status of this V1TFJob. # noqa: E501
:type: V1JobStatus
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(V1TFJob, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1TFJob):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
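# A hedged construction sketch; the group/version string and the nested spec
# below are assumptions based on the usual TFJob custom resource, not values
# taken from this generated model.
#
#     tfjob = V1TFJob(
#         api_version='kubeflow.org/v1',
#         kind='TFJob',
#         metadata=V1ObjectMeta(name='mnist-train', namespace='kubeflow'),
#         spec=V1TFJobSpec(tf_replica_specs={...}))
#     print(tfjob.to_dict())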
|
apache-2.0
| 2,290,367,220,663,842,300 | 31.024291 | 295 | 0.604172 | false | 3.927507 | false | false | false |
Mossman1215/pgp-backup
|
upload_dir_instructions.py
|
1
|
2112
|
import os,sys,json,random
class uploadTest:
def __init__(self):
self.source = sys.argv[1]
self.f = open("operations.txt",'w',1)
self.counter = 0
print('running')
self.mapping = dict()
def getParentID(self,filename):
#search the dictionary for the filename
if(filename in self.mapping):
return self.mapping[filename]
else:
return -1
def getNewID(self):
self.counter += 1
return self.counter
def getRandomID(self):
return random.randrange(0,1000,1)
def run(self):
print(self.number_of_operations())
for root,subdirs,files in os.walk(self.source, topdown=True):
#store the root id
title = os.path.basename(root)
identifier = self.getNewID()
pID = self.getParentID(title)
if(pID == -1):
pID = self.getRandomID()
self.f.write(title+','+str(identifier)+','+str(pID)+'\n')
for subdir in subdirs:
subName = os.path.basename(subdir)
self.mapping[subName] = identifier
for fi in files:
filefolder = os.path.basename(fi)
fileID = self.getRandomID()
self.f.write(filefolder+','+str(fileID)+','+str(identifier)+'\n')
self.f.write('\n')
print('complete')
self.f.close()
def number_of_operations(self):
count = 0
for root,subdirs,files in os.walk(self.source, topdown=True):
count+=1
count= count + len(files)
return count
if(__name__ == '__main__'):
var = uploadTest()
var.run()
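# A hedged sketch of the operations.txt rows the walk above produces: one
# "<name>,<id>,<parent id>" line per directory and per file, followed by a
# blank line after each directory. The names and numbers are made up.
#
#     photos,1,742
#     holiday.jpg,310,1
#     notes.txt,87,1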
|
mit
| -2,677,422,769,158,516,000 | 43 | 97 | 0.432292 | false | 4.778281 | false | false | false |
Fe-Nik-S/Examples
|
python/external_api/Sencore/lib/const.py
|
1
|
4426
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import namedtuple
path_attr = namedtuple("path_attr", ("PATH", "ATTR"))
__ALL__ = ["SENCORE_URLS_ENUM", "SENCORE_DATA_PATH", "SENCORE_ATTRS_ENUM", "RESULT_PATH"]
class SENCORE_URLS_ENUM(object):
ETH = "http://{address}/probe/ethdata"
ETR_ALL = "http://{address}/probe/etrdata?&&"
ETR = "http://{address}/probe/etrdata?inputId={input_id}&tuningSetupId={ts_index}"
GENERAL = "http://{address}/probe/generaldata?&&"
STATUS = "http://{address}/probe/status"
REFERENCE_BASE_URL = "https://mgmt.hq.ertelecom.ru/chtp/api/dir/%7B%22rec_type_id%22:2020%7D"
TDT_TIME = "http://{address}/probe/data/AnaParseTable?inputId=1&etrEngineNo=0&pid=0&tid=112&tidExtension=-1"
QAM_TIME = "http://{address}/probe/data/AnaParseTable?inputId=1&etrEngineNo=0&pid=20&tid=112&tidExtension=-1"
ETH_TIME = "http://{address}/probe/data/AnaParseTable?inputId=100&etrEngineNo=0&pid=20&tid=112&tidExtension=-1"
class SENCORE_ATTRS_ENUM(object):
__slots__ = ("ETH", "REQUIRED_PIDS")
ETH = ("bitrate", "name", "index", "cc_errors", "net_bitrate", "iat_avg")
REQUIRED_PIDS = {"0", "1", "16", "17", "18", "20", "89", "99"}
class SENCORE_DATA_PATH(object):
__slots__ = (
"ETH_TSS", "ETR_PIDS", "ETR_PIDS", "ETR_SERVICES", "ETR_CHECKS",
"GENERAL_VERSION", "GENERAL_MLERROR", "GENERAL_UPTIME",
"STATUS_CPU_TEMP", "STATUS_PORT", "STATUS_FREE_DISC",
"STATUS_STATUS_FREE_MEM", "STATUS_TIME"
)
ETH_TSS = path_attr(**{
"PATH": "EthExportData,streams,mon",
"ATTR": ("bitrate", "name", "index", "cc_errors",
"net_bitrate", "iat_avg", "dst_addr")
})
ETR_PIDS = path_attr(**{
"PATH": "Etr290ExportData,input,tuningSetup,pidList,pid",
"ATTR": ("id", "bitrate", ("max_bitrate", "maxBitrate"),
("min_bitrate", "minBitrate"), ("num_cc_errors", "numCcErrors"), "scrambled")
})
ETR_SERVICES = path_attr(**{
"PATH": "Etr290ExportData,input,tuningSetup,serviceList,service",
"ATTR": ("id", "name", "bitrate", "scrambled", "symbolrate")
})
ETR_CHECKS = path_attr(**{
"PATH": "Etr290ExportData,input,tuningSetup,etrList,group",
"ATTR": ()
})
GENERAL_VERSION = path_attr(**{
"PATH": "GeneralProbeExportData,release",
"ATTR": "version"
})
GENERAL_UPTIME = path_attr(**{
"PATH": "GeneralProbeExportData,internet,mgmt,mib2,system,sysUpTime",
"ATTR": "uptime"
})
GENERAL_MLERROR = path_attr(**{
"PATH": ("GeneralProbeExportData,internet,private,"
"enterprise,bridgetech,mlrerrTable,row"),
"ATTR": ("mlrerr1m", )
})
GENERAL_MW = path_attr(**{
"PATH": ("GeneralProbeExportData,internet,private,"
"enterprise,bridgetech,mwTable,row"),
"ATTR": ("iatPeak1m", "mlrSum1m")
})
GENERAL_CHANNEL = path_attr(**{
"PATH": ("GeneralProbeExportData,internet,private,"
"enterprise,bridgetech,channelTable,row"),
"ATTR": ("chindex", )
})
STATUS_TIME = path_attr(**{
"PATH": "Status,System,time",
"ATTR": "time"
})
STATUS_CPU_TEMP = path_attr(**{
"PATH": "Status,System,cpu_temp",
"ATTR": "cpu_temp"
})
STATUS_FREE_MEM = path_attr(**{
"PATH": "Status,Resources,ram_free",
"ATTR": "free_mem"
})
STATUS_FREE_DISC = path_attr(**{
"PATH": "Status,Resources,disk_free",
"ATTR": "free_disc"
})
STATUS_PORT = "Status,Interfaces,Fixed,Data,status"
ETR_INPUTS = path_attr(**{
"PATH": "Etr290ExportData,input",
"ATTR": (("current_bitrate", "effectiveBitrate"),
("min_signal_level", "minSignalLevel"),
("max_signal_level", "maxSignalLevel"),
("max_centre_frequency_offset", "maxCentreFrequencyOffset"),
("max_current_bitrate", "maximumEffectiveBitrate"), "id",
"name", "description", "symbolrate", "minSnr", "minMer",
("symbolrate_offset", "maxSymbolRateOffset")
)
})
TEMP_PATH = "/tmp/send2zabbix/"
AUTODISCOVERY_SEND_PERIOD = 12
AUTODISCOVERY_SEND_TIMESTAMP = "timestamp"
TIMEOUT = 900
URL_REQUEST_TIMEOUT = 10
TIME_SERVER = ''
ZABBIX_HOST = ""
ZABBIX_PORT = "10051"
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
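# A hedged sketch of how a path_attr descriptor is typically consumed: PATH is
# a comma-separated key path into the probe's decoded JSON, and ATTR names the
# fields to read from the node it reaches. The walker below is an assumption
# for illustration and is not part of this module.
#
#     def resolve(data, descriptor):
#         node = data
#         for key in descriptor.PATH.split(','):
#             node = node[key]
#         return node
#
#     time_node = resolve(status_json, SENCORE_DATA_PATH.STATUS_TIME)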
|
mit
| -8,902,507,808,757,745,000 | 33.310078 | 115 | 0.584049 | false | 3.127915 | false | false | false |
8l/beri
|
cheritest/trunk/tests/trace/test_raw_trace.py
|
2
|
2042
|
#-
# Copyright (c) 2013 Colin Rothwell
# All rights reserved.
#
# This software was developed by Colin Rothwell as part of his summer
# internship.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
import sys
from beritest_tools import BaseBERITestCase
def read_trace_records(trace_file_name, record_count, record_width=32):
with open(trace_file_name, 'rb') as trace_file:
return trace_file.read(record_count * record_width)
class test_raw_trace(BaseBERITestCase):
def test_uncached(self):
'''Test trace from uncached memory is as expected'''
actual = read_trace_records('log/test_raw_trace.trace', 5)
expected = read_trace_records('tests/trace/uncached_expected.trace', 5)
self.assertEqual(actual, expected, 'Uncached trace mismatch. Use the '
'readtrace program to debug.')
def test_cached(self):
'''Test trace from cached memory is as expected'''
actual = read_trace_records('log/test_raw_trace_cached.trace', 7)
expected = read_trace_records('tests/trace/cached_expected.trace', 7)
self.assertEqual(actual, expected, 'Cached trace mismatch. Use the '
'readtrace program to debug.')
|
apache-2.0
| 5,793,459,442,878,046,000 | 41.541667 | 79 | 0.711557 | false | 3.726277 | true | false | false |
bwohlberg/sporco
|
sporco/admm/tvl1.py
|
1
|
24507
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2020 by Brendt Wohlberg <[email protected]>
# All rights reserved. BSD 3-clause License.
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
r"""Classes for ADMM algorithms for Total Variation (TV) optimisation
with an :math:`\ell_1` data fidelity term"""
from __future__ import division, absolute_import
import copy
import numpy as np
from sporco.admm import admm
from sporco.array import zpad, atleast_nd, zdivide
from sporco.fft import real_dtype, fftn_func, ifftn_func
from sporco.signal import gradient_filters, grad, gradT
from sporco.linalg import rrs
from sporco.prox import prox_l1, prox_l2
__author__ = """Brendt Wohlberg <[email protected]>"""
class TVL1Denoise(admm.ADMM):
r"""ADMM algorithm for :math:`\ell_1`-TV denoising problem
:cite:`alliney-1992-digital` :cite:`esser-2010-primal` (Sec. 2.4.4).
Solve the optimisation problem
.. math::
\mathrm{argmin}_\mathbf{x} \;
\| W_{\mathrm{df}} (\mathbf{x} - \mathbf{s}) \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 +
(G_c \mathbf{x})^2} \right\|_1
via the ADMM problem
.. math::
\mathrm{argmin}_{\mathbf{x},\mathbf{y}_d,\mathbf{y}_r,\mathbf{y}_c} \;
(1/2) \| W_{\mathrm{df}} \mathbf{y}_d \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(\mathbf{y}_r)^2 +
(\mathbf{y}_c)^2} \right\|_1 \;\text{such that}\;
\left( \begin{array}{c} G_r \\ G_c \\ I \end{array} \right)
\mathbf{x} - \left( \begin{array}{c} \mathbf{y}_r \\
\mathbf{y}_c \\ \mathbf{y}_d \end{array}
\right) = \left( \begin{array}{c} \mathbf{0} \\ \mathbf{0} \\
\mathbf{s} \end{array} \right) \;\;,
where :math:`G_r` and :math:`G_c` are gradient operators along array
rows and columns respectively, and :math:`W_{\mathrm{df}}` and
:math:`W_{\mathrm{tv}}` are diagonal weighting matrices.
While these equations describe the default behaviour of regularisation
in two dimensions, this class supports an arbitrary number of
dimensions. For example, for 3D TV regularisation in a 3D array,
the object should be initialised with parameter `axes` set to
`(0, 1, 2)`.
After termination of the :meth:`solve` method, attribute :attr:`itstat`
is a list of tuples representing statistics of each iteration. The
fields of the named tuple ``IterationStats`` are:
``Iter`` : Iteration number
``ObjFun`` : Objective function value
``DFid`` : Value of data fidelity term :math:`\|
W_{\mathrm{df}} (\mathbf{x} - \mathbf{s}) \|_1`
``RegTV`` : Value of regularisation term :math:`\|
W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}
\|_1`
``PrimalRsdl`` : Norm of primal residual
``DualRsdl`` : Norm of dual residual
``EpsPrimal`` : Primal residual stopping tolerance
:math:`\epsilon_{\mathrm{pri}}`
``EpsDual`` : Dual residual stopping tolerance
:math:`\epsilon_{\mathrm{dua}}`
``Rho`` : Penalty parameter
``GSIter`` : Number of Gauss-Seidel iterations
``GSRelRes`` : Relative residual of Gauss-Seidel solution
``Time`` : Cumulative run time
"""
class Options(admm.ADMM.Options):
"""TVL1Denoise algorithm options
Options include all of those defined in
:class:`sporco.admm.admm.ADMM.Options`, together with
additional options:
``gEvalY`` : Flag indicating whether the :math:`g` component
of the objective function should be evaluated using variable
Y (``True``) or X (``False``) as its argument.
``MaxGSIter`` : Maximum Gauss-Seidel iterations.
``GSTol`` : Gauss-Seidel stopping tolerance.
``DFidWeight`` : Data fidelity weight matrix.
``TVWeight`` : TV term weight matrix.
"""
defaults = copy.deepcopy(admm.ADMM.Options.defaults)
defaults.update({'gEvalY': True, 'RelaxParam': 1.8,
'DFidWeight': 1.0, 'TVWeight': 1.0,
'GSTol': 0.0, 'MaxGSIter': 2
})
defaults['AutoRho'].update({'Enabled': False, 'Period': 1,
'AutoScaling': True, 'Scaling': 1000.0,
'RsdlRatio': 1.2})
def __init__(self, opt=None):
"""
Parameters
----------
opt : dict or None, optional (default None)
TVL1Denoise algorithm options
"""
if opt is None:
opt = {}
admm.ADMM.Options.__init__(self, opt)
if self['AutoRho', 'RsdlTarget'] is None:
self['AutoRho', 'RsdlTarget'] = 1.0
itstat_fields_objfn = ('ObjFun', 'DFid', 'RegTV')
itstat_fields_extra = ('GSIter', 'GSRelRes')
hdrtxt_objfn = ('Fnc', 'DFid', 'RegTV')
hdrval_objfun = {'Fnc': 'ObjFun', 'DFid': 'DFid', 'RegTV': 'RegTV'}
def __init__(self, S, lmbda, opt=None, axes=(0, 1), caxis=None):
"""
|
**Call graph**
.. image:: ../_static/jonga/tvl1den_init.svg
:width: 20%
:target: ../_static/jonga/tvl1den_init.svg
|
Parameters
----------
S : array_like
Signal vector or matrix
lmbda : float
Regularisation parameter
opt : TVL1Denoise.Options object
Algorithm options
axes : tuple, optional (default (0, 1))
Axes on which TV regularisation is to be applied
caxis : int or None, optional (default None)
Axis on which channels of a multi-channel image are stacked.
If None, TV regularisation is applied independently to each
channel, otherwise Vector TV :cite:`blomgren-1998-color`
regularisation is applied jointly to all channels.
"""
if opt is None:
opt = TVL1Denoise.Options()
# Set flag indicating whether problem involves real or complex
# values
self.real_dtype = np.isrealobj(S)
# Set dtype attribute based on S.dtype and opt['DataType']
self.set_dtype(opt, S.dtype)
self.S = np.asarray(S, dtype=self.dtype)
self.axes = axes
if caxis is None:
self.saxes = (-1,)
else:
self.saxes = (caxis, -1)
self.lmbda = real_dtype(self.dtype).type(lmbda)
# Set penalty parameter
self.set_attr('rho', opt['rho'], dval=(2.0*self.lmbda + 0.1),
dtype=real_dtype(self.dtype))
yshape = S.shape + (len(axes)+1,)
super(TVL1Denoise, self).__init__(S.size, yshape, yshape, S.dtype, opt)
self.Wdf = np.asarray(self.opt['DFidWeight'],
dtype=real_dtype(self.dtype))
self.lcw = self.LaplaceCentreWeight()
self.Wtv = np.asarray(self.opt['TVWeight'],
dtype=real_dtype(self.dtype))
if hasattr(self.Wtv, 'ndim') and self.Wtv.ndim == S.ndim:
self.Wtvna = self.Wtv[..., np.newaxis]
else:
self.Wtvna = self.Wtv
# Need to initialise X because of Gauss-Seidel in xstep
self.X = self.S
def uinit(self, ushape):
"""Return initialiser for working variable U."""
if self.opt['Y0'] is None:
return np.zeros(ushape, dtype=self.dtype)
else:
# If initial Y is non-zero, initial U is chosen so that
# the relevant dual optimality criterion (see (3.10) in
# boyd-2010-distributed) is satisfied.
Yss = np.sqrt(np.sum(self.Y[..., 0:-1]**2, axis=self.S.ndim,
keepdims=True))
U0 = (self.lmbda/self.rho)*zdivide(self.Y[..., 0:-1], Yss)
U1 = (1.0 / self.rho)*np.sign(self.Y[..., -1:])
return np.concatenate((U0, U1), axis=self.S.ndim)
def xstep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{x}`.
"""
ngsit = 0
gsrrs = np.inf
YU = self.Y - self.U
SYU = self.S + YU[..., -1]
YU[..., -1] = 0.0
ATYU = self.cnst_AT(YU)
while gsrrs > self.opt['GSTol'] and ngsit < self.opt['MaxGSIter']:
self.X = self.GaussSeidelStep(
SYU, self.X, ATYU, 1.0, self.lcw, 1.0)
gsrrs = rrs(
self.cnst_AT(self.cnst_A(self.X)),
self.cnst_AT(self.cnst_c() - self.cnst_B(self.Y) - self.U)
)
ngsit += 1
self.xs = (ngsit, gsrrs)
def ystep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.
"""
self.Y[..., 0:-1] = prox_l2(
self.AX[..., 0:-1] + self.U[..., 0:-1],
(self.lmbda/self.rho)*self.Wtvna, axis=self.saxes)
self.Y[..., -1] = prox_l1(
self.AX[..., -1] + self.U[..., -1] - self.S,
(1.0/self.rho)*self.Wdf)
def obfn_gvar(self):
"""Variable to be evaluated in computing regularisation term,
depending on 'gEvalY' option value.
"""
if self.opt['gEvalY']:
return self.Y
else:
return self.cnst_A(self.X) - self.cnst_c()
def eval_objfn(self):
r"""Compute components of objective function as well as total
contribution to objective function. Data fidelity term is
:math:`(1/2) \| \mathbf{x} - \mathbf{s} \|_2^2` and
regularisation term is :math:`\| W_{\mathrm{tv}}
\sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}\|_1`.
"""
if self.real_dtype:
gvr = self.obfn_gvar()
else:
gvr = np.abs(self.obfn_gvar())
dfd = np.sum(np.abs(self.Wdf * gvr[..., -1]))
reg = np.sum(self.Wtv * np.sqrt(np.sum(gvr[..., 0:-1]**2,
axis=self.saxes)))
obj = dfd + self.lmbda*reg
return (obj, dfd, reg)
def itstat_extra(self):
"""Non-standard entries for the iteration stats record tuple."""
return (self.xs[0], self.xs[1])
def cnst_A(self, X):
r"""Compute :math:`A \mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \mathbf{x} = (G_r^T \;\; G_c^T
\;\; I)^T \mathbf{x}`.
"""
return np.concatenate(
[grad(X, ax)[..., np.newaxis] for ax in self.axes] +
[X[..., np.newaxis],], axis=X.ndim)
def cnst_AT(self, X):
r"""Compute :math:`A^T \mathbf{x}` where :math:`A \mathbf{x}` is
a component of ADMM problem constraint. In this case
:math:`A^T \mathbf{x} = (G_r^T \;\; G_c^T \;\; I) \mathbf{x}`.
"""
return np.sum(np.concatenate(
[gradT(X[..., ax], ax)[..., np.newaxis] for ax in self.axes] +
[X[..., -1:],], axis=X.ndim-1), axis=X.ndim-1)
def cnst_B(self, Y):
r"""Compute :math:`B \mathbf{y}` component of ADMM problem
constraint. In this case :math:`B \mathbf{y} = -\mathbf{y}`.
"""
return -Y
def cnst_c(self):
r"""Compute constant component :math:`\mathbf{c}` of ADMM problem
constraint. In this case :math:`\mathbf{c} = (\mathbf{0} \;\;
\mathbf{0} \;\; \mathbf{s})`.
"""
c = np.zeros(self.S.shape + (len(self.axes)+1,), self.dtype)
c[..., -1] = self.S
return c
def rsdl_s(self, Yprev, Y):
"""Compute dual residual vector."""
return self.rho*np.linalg.norm(self.cnst_AT(self.U))
def rsdl_sn(self, U):
"""Compute dual residual normalisation term."""
return self.rho*np.linalg.norm(U)
def LaplaceCentreWeight(self):
"""Centre weighting matrix for TV Laplacian."""
sz = [1,] * self.S.ndim
for ax in self.axes:
sz[ax] = self.S.shape[ax]
lcw = 2*len(self.axes)*np.ones(sz, dtype=self.dtype)
for ax in self.axes:
lcw[(slice(None),)*ax + ([0, -1],)] -= 1.0
return lcw
def GaussSeidelStep(self, S, X, ATYU, rho, lcw, W2):
"""Gauss-Seidel step for linear system in TV problem."""
Xss = np.zeros_like(S, dtype=self.dtype)
for ax in self.axes:
Xss += zpad(X[(slice(None),)*ax + (slice(0, -1),)], (1, 0), ax)
Xss += zpad(X[(slice(None),)*ax + (slice(1, None),)],
(0, 1), ax)
return (rho*(Xss + ATYU) + W2*S) / (W2 + rho*lcw)
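# A hedged usage sketch for TVL1Denoise: given a noisy image array `img`, the
# usual SPORCO pattern is to construct the solver and call solve(); the
# regularisation weight and iteration count below are illustrative only.
#
#     opt = TVL1Denoise.Options({'MaxMainIter': 200})
#     b = TVL1Denoise(img, lmbda=0.8, opt=opt)
#     x = b.solve()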
class TVL1Deconv(admm.ADMM):
r"""ADMM algorithm for :math:`\ell_1`-TV deconvolution problem.
Solve the optimisation problem
.. math::
\mathrm{argmin}_\mathbf{x} \;
\| W_{\mathrm{df}} (H \mathbf{x} - \mathbf{s}) \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 +
(G_c \mathbf{x})^2} \right\|_1 \;\;,
where :math:`H` denotes the linear operator corresponding to a
convolution, :math:`G_r` and :math:`G_c` are gradient operators
along array rows and columns respectively, and
:math:`W_{\mathrm{df}}` and :math:`W_{\mathrm{tv}}` are diagonal
weighting matrices, via the ADMM problem
.. math::
\mathrm{argmin}_{\mathbf{x},\mathbf{y}_d,\mathbf{y}_r,\mathbf{y}_c} \;
(1/2) \| W_{\mathrm{df}} \mathbf{y}_d \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(\mathbf{y}_r)^2 +
(\mathbf{y}_c)^2} \right\|_1 \;\text{such that}\;
\left( \begin{array}{c} G_r \\ G_c \\ H \end{array} \right)
\mathbf{x} - \left( \begin{array}{c} \mathbf{y}_r \\
\mathbf{y}_c \\ \mathbf{y}_d \end{array}
\right) = \left( \begin{array}{c} \mathbf{0} \\ \mathbf{0} \\
\mathbf{s} \end{array} \right) \;\;.
While these equations describe the default behaviour of regularisation
in two dimensions, this class supports an arbitrary number of
dimensions. For example, for 3D TV regularisation in a 3D array,
the object should be initialised with parameter `axes` set to
`(0, 1, 2)`.
Note that the convolution is implemented in the frequency domain,
having the same phase offset as :func:`.fftconv`, which differs from
that of :func:`scipy.ndimage.convolve` with the default ``origin``
parameter.
After termination of the :meth:`solve` method, attribute :attr:`itstat`
is a list of tuples representing statistics of each iteration. The
fields of the named tuple ``IterationStats`` are:
``Iter`` : Iteration number
``ObjFun`` : Objective function value
``DFid`` : Value of data fidelity term :math:`\|
W_{\mathrm{df}} (H \mathbf{x} - \mathbf{s}) \|_1`
``RegTV`` : Value of regularisation term :math:`\|
W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}
\|_1`
``PrimalRsdl`` : Norm of primal residual
``DualRsdl`` : Norm of dual residual
``EpsPrimal`` : Primal residual stopping tolerance
:math:`\epsilon_{\mathrm{pri}}`
``EpsDual`` : Dual residual stopping tolerance
:math:`\epsilon_{\mathrm{dua}}`
``Rho`` : Penalty parameter
``XSlvRelRes`` : Relative residual of X step solver
``Time`` : Cumulative run time
"""
class Options(admm.ADMM.Options):
"""TVL1Deconv algorithm options
Options include all of those defined in
:class:`sporco.admm.admm.ADMM.Options`, together with
additional options:
``gEvalY`` : Flag indicating whether the :math:`g` component
of the objective function should be evaluated using variable
Y (``True``) or X (``False``) as its argument.
``LinSolveCheck`` : If ``True``, compute relative residual of
X step solver.
``DFidWeight`` : Data fidelity weight matrix.
``TVWeight`` : TV term weight matrix.
"""
defaults = copy.deepcopy(admm.ADMM.Options.defaults)
defaults.update(
{'gEvalY': True, 'RelaxParam': 1.8, 'LinSolveCheck': False,
'DFidWeight': 1.0, 'TVWeight': 1.0})
defaults['AutoRho'].update(
{'Enabled': False, 'Period': 1, 'AutoScaling': True,
'Scaling': 1000.0, 'RsdlRatio': 1.2})
def __init__(self, opt=None):
"""
Parameters
----------
opt : dict or None, optional (default None)
TVL1Deconv algorithm options
"""
if opt is None:
opt = {}
admm.ADMM.Options.__init__(self, opt)
if self['AutoRho', 'RsdlTarget'] is None:
self['AutoRho', 'RsdlTarget'] = 1.0
itstat_fields_objfn = ('ObjFun', 'DFid', 'RegTV')
itstat_fields_extra = ('XSlvRelRes',)
hdrtxt_objfn = ('Fnc', 'DFid', 'RegTV')
hdrval_objfun = {'Fnc': 'ObjFun', 'DFid': 'DFid', 'RegTV': 'RegTV'}
def __init__(self, A, S, lmbda, opt=None, axes=(0, 1), caxis=None):
"""
|
**Call graph**
.. image:: ../_static/jonga/tvl1dcn_init.svg
:width: 20%
:target: ../_static/jonga/tvl1dcn_init.svg
|
Parameters
----------
A : array_like
Filter kernel corresponding to operator :math:`H` above
S : array_like
Signal vector or matrix
lmbda : float
Regularisation parameter
opt : TVL1Deconv.Options object
Algorithm options
axes : tuple, optional (default (0, 1))
Axes on which TV regularisation is to be applied
caxis : int or None, optional (default None)
Axis on which channels of a multi-channel image are stacked.
If None, TV regularisation is applied independently to each
channel, otherwise Vector TV :cite:`blomgren-1998-color`
regularisation is applied jointly to all channels.
"""
if opt is None:
opt = TVL1Deconv.Options()
# Set flag indicating whether problem involves real or complex
# values, and get appropriate versions of functions from fft
# module
self.real_dtype = np.isrealobj(S)
self.fftn = fftn_func(self.real_dtype)
self.ifftn = ifftn_func(self.real_dtype)
# Set dtype attribute based on S.dtype and opt['DataType']
self.set_dtype(opt, S.dtype)
self.axes = axes
self.axsz = tuple([S.shape[i] for i in axes])
if caxis is None:
self.saxes = (-1,)
else:
self.saxes = (caxis, -1)
self.lmbda = real_dtype(self.dtype).type(lmbda)
# Set penalty parameter
self.set_attr('rho', opt['rho'], dval=(2.0*self.lmbda + 0.1),
dtype=real_dtype(self.dtype))
yshape = S.shape + (len(axes)+1,)
self.S = np.asarray(S, dtype=self.dtype)
super(TVL1Deconv, self).__init__(S.size, yshape, yshape, S.dtype, opt)
self.axshp = tuple([S.shape[k] for k in axes])
self.A = atleast_nd(S.ndim, A.astype(self.dtype))
self.Af = self.fftn(self.A, self.axshp, axes=axes)
self.Sf = self.fftn(self.S, axes=axes)
self.AHAf = np.conj(self.Af)*self.Af
self.AHSf = np.conj(self.Af)*self.Sf
self.Wdf = np.asarray(self.opt['DFidWeight'],
dtype=real_dtype(self.dtype))
self.Wtv = np.asarray(self.opt['TVWeight'],
dtype=real_dtype(self.dtype))
if hasattr(self.Wtv, 'ndim') and self.Wtv.ndim == S.ndim:
self.Wtvna = self.Wtv[..., np.newaxis]
else:
self.Wtvna = self.Wtv
self.Gf, self.GHGf = gradient_filters(S.ndim, axes, self.axshp,
dtype=self.dtype)
self.GAf = np.concatenate((self.Gf, self.Af[..., np.newaxis]),
axis=self.Gf.ndim-1)
def uinit(self, ushape):
"""Return initialiser for working variable U."""
if self.opt['Y0'] is None:
return np.zeros(ushape, dtype=self.dtype)
else:
# If initial Y is non-zero, initial U is chosen so that
# the relevant dual optimality criterion (see (3.10) in
# boyd-2010-distributed) is satisfied.
Yss = np.sqrt(np.sum(self.Y[..., 0:-1]**2, axis=self.S.ndim,
keepdims=True))
U0 = (self.lmbda/self.rho)*zdivide(self.Y[..., 0:-1], Yss)
U1 = (1.0 / self.rho)*np.sign(self.Y[..., -1:])
return np.concatenate((U0, U1), axis=self.S.ndim)
def xstep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{x}`.
"""
b = self.AHSf + np.sum(
np.conj(self.GAf) * self.fftn(self.Y-self.U, axes=self.axes),
axis=self.Y.ndim-1)
self.Xf = b / (self.AHAf + self.GHGf)
self.X = self.ifftn(self.Xf, self.axsz, axes=self.axes)
if self.opt['LinSolveCheck']:
ax = (self.AHAf + self.GHGf)*self.Xf
self.xrrs = rrs(ax, b)
else:
self.xrrs = None
def ystep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.
"""
self.Y[..., 0:-1] = prox_l2(
self.AX[..., 0:-1] + self.U[..., 0:-1],
(self.lmbda/self.rho)*self.Wtvna, axis=self.saxes)
self.Y[..., -1] = prox_l1(
self.AX[..., -1] + self.U[..., -1] - self.S,
(1.0/self.rho)*self.Wdf)
def obfn_gvar(self):
"""Variable to be evaluated in computing regularisation term,
depending on 'gEvalY' option value.
"""
if self.opt['gEvalY']:
return self.Y
else:
return self.cnst_A(None, self.Xf) - self.cnst_c()
def eval_objfn(self):
r"""Compute components of objective function as well as total
contribution to objective function. Data fidelity term is
:math:`\| W_{\mathrm{df}} (H \mathbf{x} - \mathbf{s}) \|_1` and
regularisation term is :math:`\| W_{\mathrm{tv}}
\sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}\|_1`.
"""
if self.real_dtype:
gvr = self.obfn_gvar()
else:
gvr = np.abs(self.obfn_gvar())
dfd = np.sum(self.Wdf * np.abs(gvr[..., -1]))
reg = np.sum(self.Wtv * np.sqrt(np.sum(gvr[..., 0:-1]**2,
axis=self.saxes)))
obj = dfd + self.lmbda*reg
return (obj, dfd, reg)
def itstat_extra(self):
"""Non-standard entries for the iteration stats record tuple."""
return (self.xrrs,)
def cnst_A(self, X, Xf=None):
r"""Compute :math:`A \mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \mathbf{x} = (G_r^T \;\;
G_c^T \;\; H)^T \mathbf{x}`.
"""
if Xf is None:
Xf = self.fftn(X, axes=self.axes)
return self.ifftn(self.GAf*Xf[..., np.newaxis], self.axsz,
axes=self.axes)
def cnst_AT(self, X):
r"""Compute :math:`A^T \mathbf{x}` where :math:`A \mathbf{x}` is
a component of ADMM problem constraint. In this case
:math:`A^T \mathbf{x} = (G_r^T \;\; G_c^T \;\; H^T) \mathbf{x}`.
"""
Xf = self.fftn(X, axes=self.axes)
return np.sum(self.ifftn(np.conj(self.GAf)*Xf, self.axsz,
axes=self.axes), axis=self.Y.ndim-1)
def cnst_B(self, Y):
r"""Compute :math:`B \mathbf{y}` component of ADMM problem
constraint. In this case :math:`B \mathbf{y} = -\mathbf{y}`.
"""
return -Y
def cnst_c(self):
r"""Compute constant component :math:`\mathbf{c}` of ADMM problem
constraint. In this case :math:`\mathbf{c} = (\mathbf{0} \;\;
\mathbf{0} \;\; \mathbf{s})`.
"""
c = np.zeros(self.S.shape + (len(self.axes)+1,), self.dtype)
c[..., -1] = self.S
return c
def rsdl_s(self, Yprev, Y):
"""Compute dual residual vector."""
return self.rho*np.linalg.norm(self.cnst_AT(self.U))
def rsdl_sn(self, U):
"""Compute dual residual normalisation term."""
return self.rho*np.linalg.norm(U)
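# Minimal usage sketch (illustrative only, not part of the library): the
# kernel, image, regularisation parameter and option values below are made-up
# placeholders, and the option names are assumed to follow the standard
# sporco ADMM options.
#
#   import numpy as np
#   img = np.random.randn(64, 64)            # observed (blurred, noisy) image
#   psf = np.ones((5, 5)) / 25.0             # assumed blur kernel H
#   opt = TVL1Deconv.Options({'MaxMainIter': 50})
#   solver = TVL1Deconv(psf, img, lmbda=0.1, opt=opt, axes=(0, 1))
#   x = solver.solve()                       # restored image estimate
#   print(solver.itstat[-1].ObjFun)          # final objective value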
|
bsd-3-clause
| 4,074,499,690,839,444,500 | 31.373844 | 79 | 0.541478 | false | 3.305057 | false | false | false |
sukritranjan/ranjansasselov2016b
|
compute_UV_doses.py
|
1
|
29816
|
# -*- coding: iso-8859-1 -*-
"""
This code is used to weigh the UV radiances we compute by biological action spectra.
"""
########################
###Import useful libraries
########################
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import pdb
from matplotlib.pyplot import cm
from scipy import interpolate as interp
import scipy.integrate
########################
###Set physical constants
########################
hc=1.98645e-9 #value of h*c in erg*nm
def cm2inch(cm): #function to convert cm to inches; useful for complying with Astrobiology size guidelines
return cm/2.54
########################
###Decide which bits of the calculation will be run
########################
plotactionspec=False #if true, plots the action spectra we are using.
plotactionspec_talk=False #if true, plots the action spectra we are using...but, optimized for a talk instead of a paper
calculatealbaz=False #if true, generates the table for the albedo and zenith angle study
calculateco2=False #if true, generates the table for the co2 study
calculatealtgas=True #if true, generates the table for the alternate gas study
########################
###Helper functions: I/O
########################
def get_UV(filename):
"""
Input: filename (including path)
Output: (wave_leftedges, wav_rightedges, surface radiance) in units of (nm, nm, photons/cm2/sec/nm)
"""
wav_leftedges, wav_rightedges, wav, toa_intensity, surface_flux, surface_intensity, surface_intensity_diffuse, surface_intensity_direct=np.genfromtxt(filename, skip_header=1, skip_footer=0, usecols=(0, 1, 2,3,4,6,7,8), unpack=True)
surface_intensity_photons=surface_intensity*(wav/(hc))
return wav_leftedges, wav_rightedges, surface_intensity_photons
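#Illustrative usage (path taken from the albedo/zenith-angle calculation below):
# left, right, surface_int=get_UV('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a=0.2_z=60.dat')
#returns the wavelength bin edges in nm and the surface radiance in photons/cm2/sec/nm.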
########################
###Helper functions: UV Dosimeters
########################
def integrated_radiance(wav_left, wav_right, surf_int, leftlim, rightlim):
"""
Computes the surface radiance integrated from leftlim to rightlim. Does this by doing a trapezoid sum. NOTE: The method I have chosen works only so long as the limits line up with the bin edges!
wav_left: left edge of wavelength bin, in nm
wav_right: right edge of wavelength bin, in nm
surf_int: total surface intensity (radiance, hemispherically-integrated) in photons/cm2/s/nm, in bin defined by wav_left and wav_right
produceplots: if True, shows plots of what it is computing
returnxy: if True, returns x,y for action spectrum.
"""
allowed_inds=np.where((wav_left>=leftlim) & (wav_right<=rightlim))
delta_wav=wav_right[allowed_inds]-wav_left[allowed_inds]
surf_int_integrated=np.sum(surf_int[allowed_inds]*delta_wav) #integration converts from photons/cm2/s/nm to photons/cm2/s
return surf_int_integrated
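#Worked example (illustrative, made-up numbers): with two 1-nm-wide bins
#covering 200-202 nm carrying 3.e12 and 5.e12 photons/cm2/s/nm respectively,
# integrated_radiance(np.array([200.,201.]), np.array([201.,202.]),
# np.array([3.e12,5.e12]), 200., 202.)
#returns 3.e12*1.+5.e12*1.=8.e12 photons/cm2/s. As noted in the docstring, the
#integration limits must coincide with bin edges for the sum to capture the
#intended interval.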
def tricyano_aqe_prodrate(wav_left, wav_right, surf_int, lambda0, produceplots, returnxy):
"""
Weights the input surface intensities by the action spectrum for the photoproduction of aquated electrons from Ritson+2012 and Patel+2015, i.e. irradiation of tricyano cuprate. The action spectrum is composed of the absorption spectrum multiplied by an assumed quantum yield function. We assume the QY function to be a step function, stepping from 0 at wavelengths longer than lambda0 to 0.06 at wavelengths shorter than lambda0. We choose 0.06 for the step function to match the estimate found by Horvath+1984; we note this value may be pH sensitive. Empirically, we know that lambda0>254 nm, but that's about it.
This process is a eustressor for abiogenesis.
wav_left: left edge of wavelength bin, in nm
wav_right: right edge of wavelength bin, in nm
surf_int: total surface intensity (radiance, hemispherically-integrated) in photons/cm2/s/nm, in bin defined by wav_left and wav_right
lambda0: value assume for lambda0.
produceplots: if True, shows plots of what it is computing
returnxy: if True, returns x,y for action spectrum.
"""
####Step 1: reduce input spectrum to match bounds of available dataset.
int_min=190.0 #This lower limit of integration is set by the limits of the cucn3 absorption dataset (left edge of bin)
int_max=351.0 #This upper limit of integration is set by the limits of the cucn3 absorption dataset (right edge of bin)
allowed_inds=np.where((wav_left>=int_min) & (wav_right<=int_max)) #indices that correspond to included data
wav_left=wav_left[allowed_inds]
wav_right=wav_right[allowed_inds]
surf_int=surf_int[allowed_inds]
delta_wav=wav_right-wav_left #size of wavelength bins in nm
####Step 2: form the action spectrum from the absorption spectrum and QY curve.
#Import the tricyanocuprate absorption spectrum
importeddata=np.genfromtxt('./Raw_Data/Magnani_Data/CuCN3_XC.dat', skip_header=2)
cucn3_wav=importeddata[:,0] #wav in nm
cucn3_molabs=importeddata[:,1] #molar absorptivities in L/(mol*cm), decadic
cucn3_molabs_func=interp.interp1d(cucn3_wav, cucn3_molabs, kind='linear') #functionalized form of cucn3 molar absorption
#does not matter if you use decadic or natural logarithmic as constant factors normalize out anyway
#Formulate the step-function quantum yield curve
def qy_stepfunc(wav, lambda0): #step function, for the photoionization model
"""Returns 1 for wav<=lambda0 and 0 for wav>lambda0"""
qy=np.zeros(np.size(wav))# initialize all to zero
inds=np.where(wav<=lambda0) #indices where the wavelength is below the threshold
qy[inds]=qy[inds]+0.06 #set the QY to 0.06 at the indices where the wavelength is below the threshold
return qy
#Integrate these quantities to match the input spectral resolution
qy_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the QY integrated over the surface intensity wavelength bins
cucn3_molabs_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the QY integrated over the surface intensity wavelength bins
for ind in range(0, len(wav_left)):
leftedge=wav_left[ind]
rightedge=wav_right[ind]
cucn3_molabs_dist[ind]=scipy.integrate.quad(cucn3_molabs_func, leftedge, rightedge, epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
qy_dist[ind]=scipy.integrate.quad(qy_stepfunc, leftedge, rightedge, args=(lambda0), epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
action_spectrum=cucn3_molabs_dist*qy_dist
#Normalize action spectrum to 1 at 195 (arbitrary)
action_spectrum=action_spectrum*(1./(np.interp(190., 0.5*(wav_left+wav_right), action_spectrum)))
####Step 3: Compute action-spectrum weighted total intensity
weighted_surface_intensity=surf_int*action_spectrum
total_weighted_radiance=np.sum(weighted_surface_intensity*delta_wav) #units: photons/cm2/s
####Step 4 (Optional): Plot various components of action spectrum to show the multiplication
if produceplots:
legendfontsize=12
axisfontsize=12
##Plot ribonucleotide absorption and interpolation
fig1, axarr=plt.subplots(3,2,sharex=True, figsize=(8., 10.5)) #specify figure size (width, height) in inches
axarr[0,0].bar(wav_left, surf_int,width=delta_wav, color='black', alpha=0.5, log=True)
axarr[0,0].set_ylim([1e10,1e16])
axarr[0,0].legend(loc=2, prop={'size':legendfontsize})
axarr[0,0].yaxis.grid(True)
axarr[0,0].xaxis.grid(True)
axarr[0,0].set_ylabel('Surface Radiance \n(photons cm$^{-2}$s$^{-1}$nm$^{-1}$)', fontsize=axisfontsize)
#axarr[0,0].title.set_position([0.5, 1.11])
#axarr[0,0].text(0.5, 1.1, r'a(i)', transform=axarr[0].transAxes, va='top')
axarr[1,0].bar(wav_left, cucn3_molabs_dist,width=delta_wav, color='black', alpha=0.5, log=True)
#axarr[1,0].set_ylim([-0.1, 1.1])
axarr[1,0].legend(loc=6, prop={'size':legendfontsize})
axarr[1,0].yaxis.grid(True)
axarr[1,0].xaxis.grid(True)
axarr[1,0].set_ylabel('CuCN3 Molar Absorptivity\n(M$^{-1}$cm$^{-1}$)', fontsize=axisfontsize)
#axarr[1,0].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[2,0].bar(wav_left, qy_dist,width=delta_wav, color='black', alpha=0.5)
axarr[2,0].set_ylim([-0.01, 0.06])
axarr[2,0].legend(loc=6, prop={'size':legendfontsize})
axarr[2,0].yaxis.grid(True)
axarr[2,0].xaxis.grid(True)
axarr[2,0].set_ylabel('Quantum Efficiency \n(reductions absorption$^{-1}$)', fontsize=axisfontsize)
#axarr[2,0].text(0.5, 1.10, r'c(i)', fontsize=12,transform=axarr[2].transAxes, va='top')
axarr[0,1].bar(wav_left, action_spectrum,width=delta_wav, color='black', alpha=0.5)
#axarr[0,1].set_ylim([-0.1, 1.1])
axarr[0,1].legend(loc=6, prop={'size':legendfontsize})
axarr[0,1].yaxis.grid(True)
axarr[0,1].xaxis.grid(True)
axarr[0,1].set_ylabel('Action Spectrum', fontsize=axisfontsize)
#axarr[0,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[1,1].bar(wav_left, weighted_surface_intensity,width=delta_wav, color='black', alpha=0.5)
#axarr[1,1].set_ylim([-0.1, 1.1])
axarr[1,1].legend(loc=6, prop={'size':legendfontsize})
axarr[1,1].yaxis.grid(True)
axarr[1,1].xaxis.grid(True)
axarr[1,1].set_ylabel('Weighted Surface Radiance', fontsize=axisfontsize)
#axarr[1,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
#plt.savefig('/home/sranjan/Python/UV/Plots/ritson_assumed_qe_v3.pdf', orientation='portrait',papertype='letter', format='pdf')
plt.show()
if returnxy:
return 0.5*(wav_left+wav_right), action_spectrum
else:
return total_weighted_radiance
def ump_glycosidic_photol(wav_left, wav_right, surf_int, lambda0, produceplots, returnxy):
"""
Weights the input surface intensities by the action spectrum for cleavage of the glycosidic bond in UMP (the U-RNA monomer), aka base release. We form this spectrum by convolving the pH=7.6 absorption spectrum for Uridine-3'-(2')-phosphoric acid (i.e. uridylic acid, UMP) from Voet et al (1963) with an assumed QY curve. The QY curve is based on the work of Gurzadyan and Gorner (1994); they measure (wavelength, QY) for N-glycosidic bond cleavage in UMP in anoxic aqueous solution (Ar-suffused) to be (193 nm, 4.3e-3) and (254 nm, (2-3)e-5). Specifically, we assume that QY=4.3e-3 for lambda<=lambda_0 and QY=2.5e-5 for lambda>lambda_0. Natural choices of lambda_0 are 194, 254, and 230 (first two: empirical limits. Last: end of pi-pi* absorption band; Sinsheimer+1949 suggest it is onset of irreversible photolytic damage).
This process is a stressor for abiogenesis.
wav_left: left edge of wavelength bin, in nm
wav_right: right edge of wavelength bin, in nm
surf_int: total surface intensity (radiance, hemispherically-integrated) in photons/cm2/s/nm, in bin defined by wav_left and wav_right
lambda0: value assume for lambda0.
produceplots: if True, shows plots of what it is computing
returnxy: if True, returns x,y for action spectrum.
"""
####Step 1: reduce input spectrum to match bounds of available dataset (absorption).
int_min=184.0 #This lower limit of integration is set by the limits of the UMP absorption dataset (left edge of bin)
int_max=299.0 #This upper limit of integration is set by the limits of the UMP absorption dataset (right edge of bin)
allowed_inds=np.where((wav_left>=int_min) & (wav_right<=int_max)) #indices that correspond to included data
wav_left=wav_left[allowed_inds]
wav_right=wav_right[allowed_inds]
surf_int=surf_int[allowed_inds]
delta_wav=wav_right-wav_left #size of wavelength bins in nm
####Step 2: form the action spectrum from the absorption spectrum and QY curve.
#Import the UMP absorption spectrum from Voet et al 1963
importeddata=np.genfromtxt('./Raw_Data/Voet_Data/ribouridine_pH_7.3_v2.txt', skip_header=0, delimiter=',')
ump_wav=importeddata[:,0] #wav in nm
ump_molabs=importeddata[:,1] #molar absorptivities\times 10^{3}, i.e. in units of 10^{-3} L/(mol*cm), decadic (I think -- unit scheme unclear in paper. Not important since normalized out)
ump_molabs_func=interp.interp1d(ump_wav, ump_molabs, kind='linear') #functionalized form of molar absorption
#does not matter if you use decadic or natural logarithmic as constant factors normalize out anyway
#Formulate the step-function quantum yield curve
def qy_stepfunc(wav, lambda0): #step function, for the photoionization model
"""QY based on work of Gurzadyan and Gorner 1994"""
qy=np.zeros(np.size(wav))# initialize all to zero
inds1=np.where(wav<=lambda0) #indices where the wavelength is below the threshold
inds2=np.where(wav>lambda0) #indices where the wavelength is below the threshold
qy[inds1]=qy[inds1]+4.3e-3 #High QY for lambda<=lambda0
qy[inds2]=qy[inds2]+2.5e-5 #Low QY for lambda>lambda0
return qy
#Integrate these quantities to match the input spectral resolution
qy_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the QY integrated over the surface intensity wavelength bins
ump_molabs_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the UMP absorption integrated over the surface intensity wavelength bins
for ind in range(0, len(wav_left)):
leftedge=wav_left[ind]
rightedge=wav_right[ind]
ump_molabs_dist[ind]=scipy.integrate.quad(ump_molabs_func, leftedge, rightedge, epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
qy_dist[ind]=scipy.integrate.quad(qy_stepfunc, leftedge, rightedge, args=(lambda0),epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
action_spectrum=ump_molabs_dist*qy_dist
#Normalize action spectrum to 1 at 190 nm (arbitrary)
action_spectrum=action_spectrum*(1./(np.interp(190., 0.5*(wav_left+wav_right), action_spectrum)))
####Step 3: Compute action-spectrum weighted total intensity
weighted_surface_intensity=surf_int*action_spectrum
total_weighted_radiance=np.sum(weighted_surface_intensity*delta_wav) #units: photons/cm2/s
####Step 4 (Optional): Plot various components of action spectrum to show the multiplication
if produceplots:
legendfontsize=12
axisfontsize=12
##Plot ribonucleotide absorption and interpolation
fig1, axarr=plt.subplots(3,2,sharex=True, figsize=(8., 10.5)) #specify figure size (width, height) in inches
axarr[0,0].bar(wav_left, surf_int,width=delta_wav, color='black', alpha=0.5, log=True)
axarr[0,0].set_ylim([1e10,1e16])
axarr[0,0].legend(loc=2, prop={'size':legendfontsize})
axarr[0,0].yaxis.grid(True)
axarr[0,0].xaxis.grid(True)
axarr[0,0].set_ylabel('Surface Radiance \n(photons cm$^{-2}$s$^{-1}$nm$^{-1}$)', fontsize=axisfontsize)
#axarr[0,0].title.set_position([0.5, 1.11])
#axarr[0,0].text(0.5, 1.1, r'a(i)', transform=axarr[0].transAxes, va='top')
axarr[1,0].bar(wav_left, ump_molabs_dist,width=delta_wav, color='black', alpha=0.5, log=False)
#axarr[1,0].set_ylim([-0.1, 1.1])
axarr[1,0].legend(loc=6, prop={'size':legendfontsize})
axarr[1,0].yaxis.grid(True)
axarr[1,0].xaxis.grid(True)
axarr[1,0].set_ylabel('UMP Molar Absorptivity\n(M$^{-1}$cm$^{-1}$)', fontsize=axisfontsize)
#axarr[1,0].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[2,0].bar(wav_left, qy_dist,width=delta_wav, color='black', alpha=0.5, log=True)
axarr[2,0].set_ylim([1e-5, 1e-2])
axarr[2,0].legend(loc=6, prop={'size':legendfontsize})
axarr[2,0].yaxis.grid(True)
axarr[2,0].xaxis.grid(True)
axarr[2,0].set_ylabel('Quantum Efficiency \n(reductions absorption$^{-1}$)', fontsize=axisfontsize)
#axarr[2,0].text(0.5, 1.10, r'c(i)', fontsize=12,transform=axarr[2].transAxes, va='top')
axarr[0,1].bar(wav_left, action_spectrum,width=delta_wav, color='black', alpha=0.5)
#axarr[0,1].set_ylim([-0.1, 1.1])
axarr[0,1].legend(loc=6, prop={'size':legendfontsize})
axarr[0,1].yaxis.grid(True)
axarr[0,1].xaxis.grid(True)
axarr[0,1].set_ylabel('Action Spectrum', fontsize=axisfontsize)
#axarr[0,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[1,1].bar(wav_left, weighted_surface_intensity,width=delta_wav, color='black', alpha=0.5)
#axarr[1,1].set_ylim([-0.1, 1.1])
axarr[1,1].legend(loc=6, prop={'size':legendfontsize})
axarr[1,1].yaxis.grid(True)
axarr[1,1].xaxis.grid(True)
axarr[1,1].set_ylabel('Weighted Surface Radiance', fontsize=axisfontsize)
#axarr[1,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
#plt.savefig('/home/sranjan/Python/UV/Plots/ritson_assumed_qe_v3.pdf', orientation='portrait',papertype='letter', format='pdf')
plt.show()
if returnxy:
return 0.5*(wav_left+wav_right), action_spectrum
else:
return total_weighted_radiance
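#Illustrative example (not used below): both dosimeter functions above follow
#the same pattern -- interpolate an absorption spectrum onto the radiance bins,
#multiply by a step-function quantum yield, normalize to 1 at 190 nm, and sum
#against the binned surface radiance. For a flat spectrum of 1 photon/cm2/s/nm,
# wave_left=np.arange(185., 295.)
# wave_right=wave_left+1.
# flat=np.ones(np.shape(wave_left))
# dose=ump_glycosidic_photol(wave_left, wave_right, flat, 230., False, False)
#the returned dose is simply the wavelength-integral of the normalized action
#spectrum; the tables written below report such doses relative to the
#top-of-atmosphere (space radiation) case.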
########################
###Plot UV Dosimeters
########################
if plotactionspec:
#Set up wavelength scale
wave_left=np.arange(100., 500.)
wave_right=np.arange(101., 501.)
wave_centers=0.5*(wave_left+wave_right)
surf_int=np.ones(np.shape(wave_centers)) #for our purposes here, this is a thunk.
#Extract action spectra
wav_gly_193, actspec_gly_193=ump_glycosidic_photol(wave_left, wave_right, surf_int, 193., False, True)
wav_gly_230, actspec_gly_230=ump_glycosidic_photol(wave_left, wave_right, surf_int, 230., False, True)
wav_gly_254, actspec_gly_254=ump_glycosidic_photol(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_254, actspec_aqe_254=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_300, actspec_aqe_300=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 300., False, True)
#####Plot action spectra
#Initialize Figure
fig, (ax1)=plt.subplots(1, figsize=(cm2inch(16.5),6), sharex=True)
colorseq=iter(cm.rainbow(np.linspace(0,1,5)))
#Plot Data
ax1.plot(wav_gly_193,actspec_gly_193, linestyle='-',linewidth=2, color=next(colorseq), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax1.plot(wav_gly_230,actspec_gly_230, linestyle='-',linewidth=2, color=next(colorseq), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax1.plot(wav_gly_254,actspec_gly_254, linestyle='-',linewidth=2, color=next(colorseq), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax1.plot(wav_aqe_254,actspec_aqe_254, linestyle='-',linewidth=2, color=next(colorseq), label=r'CuCN$_{3}$$^{2-}$ Photoionization ($\lambda_0=254$)')
ax1.plot(wav_aqe_300,actspec_aqe_300, linestyle='--',linewidth=2, color=next(colorseq), label=r'CuCN$_{3}$$^{2-}$ Photoionization ($\lambda_0=300$)')
#####Finalize and save figure
ax1.set_title(r'Action Spectra')
ax1.set_xlim([180.,360.])
ax1.set_xlabel('nm')
ax1.set_ylabel(r'Relative Sensitivity')
ax1.set_yscale('log')
ax1.set_ylim([1e-6, 1e2])
#ax1.legend(bbox_to_anchor=[0, 1.1, 1,1], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
ax1.legend(loc='upper right', ncol=1, fontsize=10)
plt.tight_layout(rect=(0,0,1,1))
plt.savefig('./Plots/actionspectra.eps', orientation='portrait',papertype='letter', format='eps')
if plotactionspec_talk:
#Set up wavelength scale
wave_left=np.arange(100., 500.)
wave_right=np.arange(101., 501.)
wave_centers=0.5*(wave_left+wave_right)
surf_int=np.ones(np.shape(wave_centers)) #for our purposes here, this is a thunk.
#Extract action spectra
wav_gly_193, actspec_gly_193=ump_glycosidic_photol(wave_left, wave_right, surf_int, 193., False, True)
wav_gly_230, actspec_gly_230=ump_glycosidic_photol(wave_left, wave_right, surf_int, 230., False, True)
wav_gly_254, actspec_gly_254=ump_glycosidic_photol(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_254, actspec_aqe_254=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_300, actspec_aqe_300=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 300., False, True)
#####Plot action spectra
#Initialize Figure
fig, (ax1)=plt.subplots(1, figsize=(10,9), sharex=True)
colorseq=iter(cm.rainbow(np.linspace(0,1,5)))
#Plot Data
ax1.plot(wav_gly_193,actspec_gly_193, linestyle='-',linewidth=3, color=next(colorseq), label=r'UMP-193')
ax1.plot(wav_gly_230,actspec_gly_230, linestyle='-',linewidth=3, color=next(colorseq), label=r'UMP-230')
ax1.plot(wav_gly_254,actspec_gly_254, linestyle='-',linewidth=3, color=next(colorseq), label=r'UMP-254')
ax1.plot(wav_aqe_254,actspec_aqe_254, linestyle='-',linewidth=3, color=next(colorseq), label=r'CuCN3-254')
ax1.plot(wav_aqe_300,actspec_aqe_300, linestyle='--',linewidth=3, color=next(colorseq), label=r'CuCN3-300')
#####Finalize and save figure
ax1.set_title(r'Action Spectra', fontsize=24)
ax1.set_xlim([180.,360.])
ax1.set_xlabel('nm',fontsize=24)
ax1.set_ylabel(r'Relative Sensitivity', fontsize=24)
ax1.set_yscale('log')
ax1.set_ylim([1e-6, 1e2])
ax1.legend(bbox_to_anchor=[0, 1.1, 1,0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=24)
#ax1.legend(loc='upper right', ncol=1, fontsize=16)
ax1.xaxis.set_tick_params(labelsize=24)
ax1.yaxis.set_tick_params(labelsize=24)
plt.tight_layout(rect=(0,0,1,0.75))
plt.savefig('./TalkFigs/actionspectra.pdf', orientation='portrait',papertype='letter', format='pdf')
########################
###Set "base" values to normalize the alb-zen, co2, and alt-gas dosimeters by
########################
#Use the TOA flux in order to get a good, physically understandable denominator.
wav_leftedges, wav_rightedges, wav, toa_intensity=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a=0.2_z=60.dat', skip_header=1, skip_footer=0, usecols=(0, 1,2, 3), unpack=True)
toa_intensity_photons=toa_intensity*(wav/(hc))
#Compute base doses
intrad100_165_base=integrated_radiance(wav_leftedges, wav_rightedges, toa_intensity_photons, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300_base=integrated_radiance(wav_leftedges, wav_rightedges, toa_intensity_photons, 200., 300.) #This is just an empirical gauge.
umpgly_193_base=ump_glycosidic_photol(wav_leftedges, wav_rightedges, toa_intensity_photons, 193., False, False)
umpgly_230_base=ump_glycosidic_photol(wav_leftedges, wav_rightedges, toa_intensity_photons,230., False, False)
umpgly_254_base=ump_glycosidic_photol(wav_leftedges, wav_rightedges, toa_intensity_photons, 254., False, False)
tricyano254_base=tricyano_aqe_prodrate(wav_leftedges, wav_rightedges, toa_intensity_photons, 254., False, False)
tricyano300_base=tricyano_aqe_prodrate(wav_leftedges, wav_rightedges, toa_intensity_photons, 300., False, False)
########################
###Run code for albedo, zenith angle
########################
if calculatealbaz:
#Evaluate only two zenith angles (to show range of variation)
zenithangles=['66.5', '0']
albedos=['tundra', 'ocean', 'desert', 'oldsnow', 'newsnow']
for zenind in range(0, len(zenithangles)):
zenithangle=zenithangles[zenind]
for albind in range(0, len(albedos)):
albedo=albedos[albind]
datafile='./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a='+albedo+'_z='+zenithangle+'.dat'
left, right, surface_int=get_UV(datafile)
intrad100_165=integrated_radiance(left, right, surface_int, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300=integrated_radiance(left, right, surface_int, 200., 300.) #This is just an empirical gauge.
umpgly_193=ump_glycosidic_photol(left, right, surface_int, 193., False, False)
umpgly_230=ump_glycosidic_photol(left, right, surface_int,230., False, False)
umpgly_254=ump_glycosidic_photol(left, right, surface_int, 254., False, False)
tricyano254=tricyano_aqe_prodrate(left, right, surface_int, 254., False, False)
tricyano300=tricyano_aqe_prodrate(left, right, surface_int, 300., False, False)
line=np.array([zenithangle, albedo, intrad100_165/intrad100_165_base,intrad200_300/intrad200_300_base, umpgly_193/umpgly_193_base, umpgly_230/umpgly_230_base, umpgly_254/umpgly_254_base, tricyano254/tricyano254_base, tricyano300/tricyano300_base])
if (albind==0 and zenind==0):
albzentable=line #need to initialize in this case
else:
albzentable=np.vstack((albzentable, line))
#Save output
f=open('./Doses/albzen_uv_doses.dat','w')
f.write('All Dosimeters Normalized to Space Radiation Case\n')
np.savetxt(f, albzentable, delimiter=' ', fmt='%s', newline='\n', header='Zenith Angle & Albedo & Radiance (100-165 nm) & Radiance (200-300 nm) & UMP Gly Cleavage (lambda0=193nm) & UMP Gly Cleavage (lambda0=230nm) & UMP Gly Cleavage (lambda0=254nm) & CuCN3 Photoionization (lambda0=254 nm) & CuCN3 Photoionization (lambda0=300 nm)\n')
f.close()
########################
###Run code for varying CO2 levels
########################
if calculateco2:
N_co2_rugh=2.09e24 #column density of CO2 in Rugheimer base model (cm**-2)
co2multiples=np.array([0., 1.e-6,1.e-5, 1.e-4, 1.e-3, 0.00893, 1.e-2, 1.e-1, 0.6, 1., 1.33, 1.e1, 46.6, 1.e2, 470., 1.e3])
zenithangles=['0', '66.5']
albedos=['newsnow', 'tundra']
for surfind in range(0, len(zenithangles)):
albedo=albedos[surfind]
zenithangle=zenithangles[surfind]
for multind in range(0, len(co2multiples)):
multiple=co2multiples[multind]
colden_co2=N_co2_rugh*multiple
datafile='./TwoStreamOutput/CO2lim/surface_intensities_co2limits_co2multiple='+str(multiple)+'_a='+albedo+'_z='+zenithangle+'.dat'
left, right, surface_int=get_UV(datafile)
intrad100_165=integrated_radiance(left, right, surface_int, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300=integrated_radiance(left, right, surface_int, 200., 300.) #This is just an empirical gauge.
umpgly_193=ump_glycosidic_photol(left, right, surface_int, 193., False, False)
umpgly_230=ump_glycosidic_photol(left, right, surface_int,230., False, False)
umpgly_254=ump_glycosidic_photol(left, right, surface_int, 254., False, False)
tricyano254=tricyano_aqe_prodrate(left, right, surface_int, 254., False, False)
tricyano300=tricyano_aqe_prodrate(left, right, surface_int, 300., False, False)
#print intrad200_300
#pdb.set_trace()
line=np.array([zenithangle, albedo, colden_co2, intrad100_165/intrad100_165_base,intrad200_300/intrad200_300_base, umpgly_193/umpgly_193_base, umpgly_230/umpgly_230_base, umpgly_254/umpgly_254_base, tricyano254/tricyano254_base, tricyano300/tricyano300_base])
if (multind==0 and surfind==0):
co2table=line #need to initialize in this case
else:
co2table=np.vstack((co2table, line))
#Save Output
f=open('./Doses/co2_uv_doses.dat','w')
f.write('All Dosimeters Normalized to Space Radiation Case\n')
np.savetxt(f, co2table, delimiter=' ', fmt='%s', newline='\n', header='Zenith Angle & Albedo & CO2 Column Density (cm-2) & Radiance (100-165 nm) & Radiance (200-300 nm) & UMP Gly Cleavage (lambda0=193nm) & UMP Gly Cleavage (lambda0=230nm) & UMP Gly Cleavage (lambda0=254nm) & CuCN3 Photoionization (lambda0=254 nm) & CuCN3 Photoionization (lambda0=300 nm)\n')
f.close()
########################
###Run code for alternate gas absorption.
########################
if calculatealtgas:
#####Set up info about the files to extract # All are the maximum possible natural surface radiance case (z=0, albedo=fresh snow) aka "max"
N_tot=2.0925e25#total column density of Rugheimer+2015 model in cm**-2
gaslist=['h2o', 'ch4', 'so2', 'o2', 'o3', 'h2s'] #list of gases we are doing this for
base_abundances=np.array([4.657e-3, 1.647e-6, 3.548e-11, 2.241e-6, 8.846e-11, 7.097e-11]) #molar concentration of each of these gases in the Rugheimer model.
gasmultiples={}#dict holding the multiples of the molar concentration we are using
gasmultiples['h2o']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['ch4']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['so2']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5, 1.e6, 1.e7])
gasmultiples['o2']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['o3']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['h2s']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5, 1.e6, 1.e7])
#####In a loop, extract the files and compute the statistics
for gasind in range(0, len(gaslist)):
gas=gaslist[gasind]
base_abundance=base_abundances[gasind]
multiples=gasmultiples[gas]
for multind in range(0, len(multiples)):
multiple=multiples[multind]
colden_X=base_abundance*multiple*N_tot #total column density of gas X
datafile='./TwoStreamOutput/gaslim/surface_intensities_'+gas+'limits_'+gas+'multiple='+str(multiple)+'_a=newsnow_z=0.dat'
left, right, surface_int=get_UV(datafile)
intrad100_165=integrated_radiance(left, right, surface_int, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300=integrated_radiance(left, right, surface_int, 200., 300.) #This is just an empirical gauge.
umpgly_193=ump_glycosidic_photol(left, right, surface_int, 193., False, False)
umpgly_230=ump_glycosidic_photol(left, right, surface_int,230., False, False)
umpgly_254=ump_glycosidic_photol(left, right, surface_int, 254., False, False)
tricyano254=tricyano_aqe_prodrate(left, right, surface_int, 254., False, False)
tricyano300=tricyano_aqe_prodrate(left, right, surface_int, 300., False, False)
line=np.array([gas, colden_X, intrad100_165/intrad100_165_base,intrad200_300/intrad200_300_base, umpgly_193/umpgly_193_base, umpgly_230/umpgly_230_base, umpgly_254/umpgly_254_base, tricyano254/tricyano254_base, tricyano300/tricyano300_base])
if (multind==0):
altgastable=line #need to initialize in this case
else:
altgastable=np.vstack((altgastable, line))
f=open('./Doses/'+gas+'_uv_doses.dat','w')
f.write('All Dosimeters Normalized to Space Radiation Case\n')
np.savetxt(f, altgastable, delimiter=' & ', fmt='%s', newline='\n', header='Gas & Column Density (cm-2) & Radiance (100-165 nm) & Radiance (200-300 nm) & UMP Gly Cleavage (lambda0=193nm) & UMP Gly Cleavage (lambda0=230nm) & UMP Gly Cleavage (lambda0=254nm) & CuCN3 Photoionization (lambda0=254 nm) & CuCN3 Photoionization (lambda0=300 nm)\n')
f.close()
#Wrap Up
########################
###Wrap Up
########################
plt.show()
|
mit
| -5,561,900,649,416,079,000 | 54.317254 | 825 | 0.713442 | false | 2.646077 | false | false | false |
nonbiostudent/python-spectroscopy
|
src/spectroscopy/plugins/flyspec.py
|
1
|
8319
|
"""
Plugin to read and write FlySpec data.
"""
import calendar
import datetime
import os
import numpy as np
from spectroscopy.dataset import Dataset, Spectra, ResourceIdentifier, Retrievals
from spectroscopy.plugins import DatasetPluginBase
class FlySpecPluginException(Exception):
pass
class FlySpecPlugin(DatasetPluginBase):
def open(self, filename, format=None, timeshift=0.0, **kargs):
"""
Load data from FlySpec instruments.
:param timeshift: FlySpecs record data in local time, so a timeshift in
hours of local time with respect to UTC can be given. For example,
`timeshift=12.00` will subtract 12 hours from the recorded time.
:type timeshift: float
"""
# load data and convert southern hemisphere to negative
# latitudes and western hemisphere to negative longitudes
def todd(x):
"""
Convert degrees and decimal minutes to decimal degrees.
"""
idx = x.find('.')
minutes = float(x[idx - 2:]) / 60.
deg = float(x[:idx - 2])
return deg + minutes
data = np.loadtxt(filename, usecols=range(0, 21),
converters={
8: todd,
9: lambda x: -1.0 if x.lower() == 's' else 1.0,
10: todd,
11: lambda x: -1.0 if x.lower() == 'w' else 1.0})
if len(data.shape) < 2:
raise FlySpecPluginException(
'File %s contains only one data point.'
% (os.path.basename(filename)))
ts = -1. * timeshift * 60. * 60.
int_times = np.zeros(data[:, :7].shape, dtype='int')
int_times[:, :6] = data[:, 1:7]
# convert decimal seconds to milliseconds
int_times[:, 6] = (data[:, 6] - int_times[:, 5]) * 1000
times = [datetime.datetime(*int_times[i, :]) +
datetime.timedelta(seconds=ts)
for i in range(int_times.shape[0])]
unix_times = [calendar.timegm(i.utctimetuple()) for i in times]
latitude = data[:, 8] * data[:, 9]
longitude = data[:, 10] * data[:, 11]
elevation = data[:, 12]
so2 = data[:, 16]
angles = data[:, 17]
s = Spectra(self, angle=np.zeros(angles.shape),
position=np.zeros((latitude.size, 3)),
time=np.zeros(angles.shape))
slice_start = 0
slice_end = slice_start
self.d = Dataset(self, spectra=[s])
for a in self._split_by_scan(angles, unix_times, longitude,
latitude, elevation, so2):
slice_end = slice_start + a[0].size
s.angle[slice_start:slice_end] = a[0]
s.time[slice_start:slice_end] = a[1]
position = np.vstack((a[2], a[3], a[4])).T
s.position[slice_start:slice_end, :] = position
r = Retrievals(self,
spectra_id=ResourceIdentifier(s.resource_id.id),
type='FlySpec', gas_species='SO2',
slice=slice(slice_start, slice_end), sca=a[5])
self.d.retrievals.append(r)
slice_start = slice_end
# Consistency check to make sure no data was dropped during slicing
assert s.angle.std() == angles.std()
return self.d
def _array_multi_sort(self, *arrays):
"""
Sorts multiple numpy arrays based on the contents of the first array.
>>> x1 = np.array([4.,5.,1.,2.])
>>> x2 = np.array([10.,11.,12.,13.])
>>> f = FlySpecPlugin()
>>> f._array_multi_sort(*tuple([x1,x2]))
(array([ 1., 2., 4., 5.]), array([ 12., 13., 10., 11.]))
"""
c = np.rec.fromarrays(
arrays, names=[str(i) for i in range(len(arrays))])
c.sort() # sort based on values in first array
return tuple([c[str(i)] for i in range(len(arrays))])
def _split_by_scan(self, angles, *vars_):
"""
Returns an iterator that will split lists/arrays of data by scan (i.e.
between start and end angle) an arbitrary number of lists of data can
be passed in - the iterator will return a list of arrays of length
len(vars_) + 1 with the split angles array at index one, and the
remaining data lists in order afterwards. The lists will be sorted
into ascending angle order.
>>> angles = np.array([30, 35, 40, 35, 30, 35, 40])
>>> f = FlySpecPlugin()
>>> [a[0] for a in f._split_by_scan(angles)]
[array([30, 35, 40]), array([30, 35]), array([35, 40])]
>>> [a[1] for a in f._split_by_scan(angles, np.array([1,2,3,4,5,6,7]))]
[array([1, 2, 3]), array([5, 4]), array([6, 7])]
"""
# everything breaks if there are more than two equal angles in a row.
if np.any(np.logical_and((angles[1:] == angles[:-1])[:-1],
angles[2:] == angles[:-2])):
idx = np.argmax(np.logical_and(
(angles[1:] == angles[:-1])[:-1], angles[2:] == angles[:-2]))
raise ValueError, "Data at line " + str(idx + 2) + \
" contains three or more repeated angle entries (in a row). \
Don't know how to split this into scans."
anglegradient = np.zeros(angles.shape)
anglegradient[1:] = np.diff(angles)
# if there are repeated start or end angles, then you end up with zeros
# in the gradients. Possible zeros at the start need to be dealt with
# separately, otherwise you end up with the first point being put in a
# scan of its own.
if anglegradient[1] == 0:
anglegradient[1] = anglegradient[2]
if anglegradient[-1] == 0:
anglegradient[-1] = anglegradient[-2]
anglegradient[0] = anglegradient[1]
# replace zero gradients within the array with the value of its left
# neighbour
b = np.roll(anglegradient, 1)
b[0] = anglegradient[0]
anglegradient = np.where(np.abs(anglegradient) > 0, anglegradient, b)
firstarray = anglegradient > 0
secondarray = np.copy(firstarray)
secondarray[1:] = secondarray[0:-1]
secondarray[0] = not secondarray[0]
inflectionpoints = np.where(firstarray != secondarray)[0]
if len(inflectionpoints) < 2:
yield self._array_multi_sort(angles, *vars_)
else:
d = [angles[:inflectionpoints[1]]]
for l in vars_:
d.append(l[0:inflectionpoints[1]:])
yield self._array_multi_sort(*tuple(d))
i = 1
while i < len(inflectionpoints) - 1:
if inflectionpoints[i + 1] - inflectionpoints[i] < 2:
inflectionpoints[i + 1] = inflectionpoints[i]
i += 1
continue
d = [angles[inflectionpoints[i]: inflectionpoints[i + 1]]]
for l in vars_:
d.append(l[inflectionpoints[i]: inflectionpoints[i + 1]])
i += 1
yield self._array_multi_sort(*tuple(d))
# the final point is not an inflection point so now we need to
# return the final scan
d = [angles[inflectionpoints[i]:]]
for l in vars_:
d.append(l[inflectionpoints[i]:])
yield self._array_multi_sort(*tuple(d))
def close(self, filename):
raise Exception('Close is undefined for the FlySpec backend')
def get_item(self, path):
_e = path.split('/')
id = _e[1]
name = _e[2]
ref_o = ResourceIdentifier(id).get_referred_object()
return ref_o.__dict__[name]
def set_item(self, path, value):
_e = path.split('/')
id = _e[1]
name = _e[2]
ref_o = ResourceIdentifier(id).get_referred_object()
ref_o.__dict__[name] = value
def create_item(self, path, value):
pass
def new(self, filename=None):
self._root = FlySpecPlugin()
@staticmethod
def get_format():
return 'flyspec'
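# Usage sketch (illustrative; the file name is a placeholder): read a FlySpec
# log recorded 12 hours ahead of UTC into a Dataset and walk the per-scan SO2
# retrievals.
#
# d = FlySpecPlugin().open('flyspec_scan_log.txt', timeshift=12.0)
# for r in d.retrievals:
#     angles = d.spectra[0].angle[r.slice]   # viewing angles for this scan
#     so2 = r.sca                            # retrieved SO2 for this scan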
if __name__ == '__main__':
import doctest
doctest.testmod()
|
gpl-3.0
| 7,344,202,771,692,823,000 | 38.42654 | 81 | 0.538406 | false | 3.796896 | false | false | false |
maggienj/ActiveData
|
tests/test_jx/test_set_ops.py
|
1
|
34963
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import division
from __future__ import unicode_literals
from mo_dots import wrap
from mo_math import Math
from unittest import skipIf
from jx_python.query import DEFAULT_LIMIT, MAX_LIMIT
from tests.test_jx import BaseTestCase, TEST_TABLE, global_settings, NULL
lots_of_data = wrap([{"a": i} for i in range(30)])
class TestSetOps(BaseTestCase):
def test_star(self):
test = {
"data": [{"a": 1}],
"query": {
"select": "*",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [{"a": 1}]
}
}
self.utils.execute_tests(test)
def test_simplest(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE,
"select": "a"
},
"expecting_list": {
"meta": {"format": "list"}, "data": ["b"]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"a": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_select_on_missing_field(self):
test = {
"data": [
{"a": {"b": {"c": 1}}},
{"a": {"b": {"c": 2}}},
{"a": {"b": {"c": 3}}},
{"a": {"b": {"c": 4}}},
{"a": {"b": {"c": 5}}}
],
"query": {
"from": TEST_TABLE,
"select": "a.b.d"
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{},
{},
{},
{},
{}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b.d"],
"data": [[NULL], [NULL], [NULL], [NULL], [NULL]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 5, "interval": 1}
}
],
"data": {
"a.b.d": [NULL, NULL, NULL, NULL, NULL]
}
}
}
self.utils.execute_tests(test)
def test_select_on_shallow_missing_field(self):
test = {
"data": [
{"a": {"b": {"c": 1}}},
{"a": {"b": {"c": 2}}},
{"a": {"b": {"c": 3}}},
{"a": {"b": {"c": 4}}},
{"a": {"b": {"c": 5}}}
],
"query": {
"from": TEST_TABLE,
"select": "d"
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{},
{},
{},
{},
{}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["d"],
"data": [[NULL], [NULL], [NULL], [NULL], [NULL]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 5, "interval": 1}
}
],
"data": {
"d": [NULL, NULL, NULL, NULL, NULL]
}
}
}
self.utils.execute_tests(test)
def test_single_deep_select(self):
test = {
"data": [
{"a": {"b": {"c": 1}}},
{"a": {"b": {"c": 2}}},
{"a": {"b": {"c": 3}}},
{"a": {"b": {"c": 4}}},
{"a": {"b": {"c": 5}}}
],
"query": {
"from": TEST_TABLE,
"select": "a.b.c",
"sort": "a.b.c" # SO THE CUBE COMPARISON WILL PASS
},
"expecting_list": {
"meta": {"format": "list"},
"data": [1, 2, 3, 4, 5]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b.c"],
"data": [[1], [2], [3], [4], [5]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 5, "interval": 1}
}
],
"data": {
"a.b.c": [1, 2, 3, 4, 5]
}
}
}
self.utils.execute_tests(test)
def test_single_select_alpha(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE,
"select": "a"
},
"expecting_list": {
"meta": {"format": "list"}, "data": ["b"]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"a": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_single_rename(self):
test = {
"name": "rename singleton alpha",
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"}
},
"expecting_list": {
"meta": {"format": "list"}, "data": ["b"]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["value"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"value": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_single_no_select(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a": "b"}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["."],
"data": [[{"a": "b"}]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
".": [{"a": "b"}]
}
}
}
self.utils.execute_tests(test)
def test_id_select(self):
"""
ALWAYS GOOD TO HAVE AN ID, CALL IT "_id"
"""
test = {
"data": [
{"a": "b"}
],
"query": {
"select": "_id",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"_id": Math.is_hex}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["_id"],
"data": [[Math.is_hex]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"_id": [Math.is_hex]
}
}
}
self.utils.execute_tests(test)
def test_id_value_select(self):
"""
ALWAYS GOOD TO HAVE AN ID, CALL IT "_id"
"""
test = {
"data": [
{"a": "b"}
],
"query": {
"select": "_id",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
Math.is_hex
]
}
}
self.utils.execute_tests(test)
def test_single_star_select(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"select": "*",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a": "b"}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"a": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_dot_select(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"select": {"name": "value", "value": "."},
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"},
"data": [{"a": "b"}]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["value"],
"data": [[{"a": "b"}]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"value": [{"a": "b"}]
}
}
}
self.utils.execute_tests(test)
@skipIf(global_settings.use == "elasticsearch", "ES only accepts objects, not values")
def test_list_of_values(self):
test = {
"data": ["a", "b"],
"query": {
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
"a", "b"
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["."],
"data": [["a"], ["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 2, "interval": 1}
}
],
"data": {
".": ["a", "b"]
}
}
}
self.utils.execute_tests(test)
def test_select_all_from_list_of_objects(self):
test = {
"data": [
{"a": "b"},
{"a": "d"}
],
"query": {
"from": TEST_TABLE,
"select": "*"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"a": "b"},
{"a": "d"}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [
["b"],
["d"]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 2, "interval": 1}
}
],
"data": {
"a": ["b", "d"]
}
}
}
self.utils.execute_tests(test)
@skipIf(True, "Too complicated")
def test_select_into_children(self):
test = {
"name": "select into children to table",
"metadata": {
"properties": {
"x": {"type": "integer"},
"a": {
"type": "nested",
"properties": {
"y": {
"type": "string"
},
"b": {
"type": "nested",
"properties": {
"c": {"type": "integer"},
"1": {"type": "integer"}
}
},
"z": {
"type": "string"
}
}
}
}
},
"data": [
{"x": 5},
{
"a": [
{
"b": {"c": 13},
"y": "m"
},
{
"b": [
{"c": 17, "1": 27},
{"c": 19}
],
"y": "q"
},
{
"y": "r"
}
],
"x": 3
},
{
"a": {"b": {"c": 23}},
"x": 7
},
{
"a": {"b": [
{"c": 29, "1": 31},
{"c": 37, "1": 41},
{"1": 47},
{"c": 53, "1": 59}
]},
"x": 11
}
],
"query": {
"from": TEST_TABLE + ".a.b",
"select": ["...x", "c"]
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"x": 5, "c": NULL},
{"x": 3, "c": 13},
{"x": 3, "c": 17},
{"x": 3, "c": 19},
{"x": 7, "c": 23},
{"x": 11, "c": 29},
{"x": 11, "c": 37},
{"x": 11, "c": NULL},
{"x": 11, "c": 53}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["x", "c"],
"data": [
[5, NULL],
[3, 13],
[3, 17],
[3, 19],
[7, 23],
[11, 29],
[11, 37],
[11, NULL],
[11, 53]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "index",
"domain": {"type": "rownum", "min": 0, "max": 9, "interval": 1}
}
],
"data": {
"x": [5, 3, 3, 3, 7, 11, 11, 11, 11],
"c": [NULL, 13, 17, 19, 23, 29, 37, NULL, 53]
}
}
}
self.utils.execute_tests(test)
@skipIf(global_settings.use=="sqlite", "no need for limit when using own resources")
def test_max_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
"limit": 1000000000
}
})
self.utils.fill_container(test)
result = self.utils.execute_query(test.query)
self.assertEqual(result.meta.es_query.size, MAX_LIMIT)
def test_default_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
},
})
self.utils.fill_container(test)
test.query.format = "list"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), DEFAULT_LIMIT)
test.query.format = "table"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), DEFAULT_LIMIT)
test.query.format = "cube"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data.value), DEFAULT_LIMIT)
def test_specific_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
"limit": 5
},
})
self.utils.fill_container(test)
test.query.format = "list"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), 5)
test.query.format = "table"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), 5)
test.query.format = "cube"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data.value), 5)
def test_negative_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
"limit": -1
},
})
self.utils.fill_container(test)
test.query.format = "list"
self.assertRaises(Exception, self.utils.execute_query, test.query)
def test_select_w_star(self):
test = {
"data": [
{"a": {"b": 0, "c": 0}, "d": 7},
{"a": {"b": 0, "c": 1}},
{"a": {"b": 1, "c": 0}},
{"a": {"b": 1, "c": 1}}
],
"query": {
"from": TEST_TABLE,
"select": "*",
"sort": ["a.b", "a.c"]
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a.b": 0, "a.c": 0, "d": 7},
{"a.b": 0, "a.c": 1},
{"a.b": 1, "a.c": 0},
{"a.b": 1, "a.c": 1}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b", "a.c", "d"],
"data": [
[0, 0, 7],
[0, 1, NULL],
[1, 0, NULL],
[1, 1, NULL]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": [0, 0, 1, 1],
"a.c": [0, 1, 0, 1],
"d": [7, NULL, NULL, NULL]
}
}
}
self.utils.execute_tests(test)
def test_select_w_deep_star(self):
test = {
"data": [
{"a": {"b": 0, "c": 0}},
{"a": {"b": 0, "c": 1}},
{"a": {"b": 1, "c": 0}},
{"a": {"b": 1, "c": 1}},
],
"query": {
"from": TEST_TABLE,
"select": "a.*",
"sort": ["a.b", "a.c"]
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a.b": 0, "a.c": 0},
{"a.b": 0, "a.c": 1},
{"a.b": 1, "a.c": 0},
{"a.b": 1, "a.c": 1}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b", "a.c"],
"data": [
[0, 0],
[0, 1],
[1, 0],
[1, 1]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": [0, 0, 1, 1],
"a.c": [0, 1, 0, 1]
}
}
}
self.utils.execute_tests(test)
def test_select_expression(self):
test = {
"data": [
{"a": {"b": 0, "c": 0}},
{"a": {"b": 0, "c": 1}},
{"a": {"b": 1, "c": 0}},
{"a": {"b": 1, "c": 1}},
],
"query": {
"from": TEST_TABLE,
"select": [
{"name": "sum", "value": {"add": ["a.b", "a.c"]}},
{"name": "sub", "value": {"sub": ["a.b", "a.c"]}}
],
"sort": ["a.b", "a.c"]
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"sum": 0, "sub": 0},
{"sum": 1, "sub": -1},
{"sum": 1, "sub": 1},
{"sum": 2, "sub": 0}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["sum", "sub"],
"data": [[0, 0], [1, -1], [1, 1], [2, 0]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"sum": [0, 1, 1, 2],
"sub": [0, -1, 1, 0]
}
}
}
self.utils.execute_tests(test)
def test_select_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
        THIS SHOULD USE THE SCHEMA TO TRANSLATE A SELECT-ON-OBJECT INTO MANY SELECTS ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["a"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"a": {"b": "x", "v": 2}},
{"a": {"b": "x", "v": 5}},
{"a": {"b": "x", "v": 7}},
{}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [
[{"b": "x", "v": 2}],
[{"b": "x", "v": 5}],
[{"b": "x", "v": 7}],
[{}]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
]
}
}
}
self.utils.execute_tests(test)
def test_select_leaves(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
        THIS SHOULD USE THE SCHEMA TO TRANSLATE A SELECT-ON-OBJECT INTO MANY SELECTS ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["a.*"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"a.b": "x", "a.v": 2},
{"a.b": "x", "a.v": 5},
{"a.b": "x", "a.v": 7},
{}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b", "a.v"],
"data": [
["x", 2],
["x", 5],
["x", 7],
[NULL, NULL]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": ["x", "x", "x", NULL],
"a.v": [2, 5, 7, NULL]
}
}
}
self.utils.execute_tests(test)
def test_select_value_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
        THIS SHOULD USE THE SCHEMA TO TRANSLATE A SELECT-ON-OBJECT INTO MANY SELECTS ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": "a",
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [
[{"b": "x", "v": 2}],
[{"b": "x", "v": 5}],
[{"b": "x", "v": 7}],
[{}]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
]
}
}
}
self.utils.execute_tests(test)
def test_select2_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
        THIS SHOULD USE THE SCHEMA TO TRANSLATE A SELECT-ON-OBJECT INTO MANY SELECTS ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["o", "a"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["o", "a"],
"data": [
[3, {"b": "x", "v": 2}],
[1, {"b": "x", "v": 5}],
[2, {"b": "x", "v": 7}],
[4, {}]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
],
"o": [3, 1, 2, 4]
}
}
}
self.utils.execute_tests(test)
def test_select3_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
        THIS SHOULD USE THE SCHEMA TO TRANSLATE A SELECT-ON-OBJECT INTO MANY SELECTS ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["o", "a.*"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"o": 3, "a.b": "x", "a.v": 2},
{"o": 1, "a.b": "x", "a.v": 5},
{"o": 2, "a.b": "x", "a.v": 7},
{"o": 4}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["o", "a.b", "a.v"],
"data": [
[3, "x", 2],
[1, "x", 5],
[2, "x", 7],
[4, NULL, NULL]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": ["x", "x", "x", NULL],
"a.v": [2, 5, 7, NULL],
"o": [3, 1, 2, 4]
}
}
}
self.utils.execute_tests(test)
def test_select_nested_column(self):
test = {
"data": [
{"_a": [{"b": 1, "c": 1}, {"b": 2, "c": 1}]},
{"_a": [{"b": 1, "c": 2}, {"b": 2, "c": 2}]}
],
"query": {
"from": TEST_TABLE,
"select": "_a"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
[{"b": 1, "c": 1}, {"b": 2, "c": 1}],
[{"b": 1, "c": 2}, {"b": 2, "c": 2}]
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["_a"],
"data": [
[[{"b": 1, "c": 1}, {"b": 2, "c": 1}]],
[[{"b": 1, "c": 2}, {"b": 2, "c": 2}]]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 2, "interval": 1}
}
],
"data": {
"_a": [
[{"b": 1, "c": 1}, {"b": 2, "c": 1}],
[{"b": 1, "c": 2}, {"b": 2, "c": 2}]
]
}
}
}
self.utils.execute_tests(test)
|
mpl-2.0
| -3,831,913,277,298,706,400 | 29.995567 | 90 | 0.263736 | false | 4.12008 | true | false | false |
jameswatt2008/jameswatt2008.github.io
|
python/Python基础/截图和代码/加强/老王开枪/老王开枪-7-创建敌人.py
|
1
|
2720
|
class Person(object):
"""人的类"""
def __init__(self, name):
super(Person, self).__init__()
self.name = name
        self.gun = None  # reference to the gun object this person holds
self.hp = 100
def anzhuang_zidan(self, dan_jia_temp, zi_dan_temp):
"""把子弹装到弹夹中"""
#弹夹.保存子弹(子弹)
dan_jia_temp.baocun_zidan(zi_dan_temp)
def anzhuang_danjia(self, gun_temp, dan_jia_temp):
"""把弹夹安装到枪中"""
#枪.保存弹夹(弹夹)
gun_temp.baocun_danjia(dan_jia_temp)
def naqiang(self, gun_temp):
"""拿起一把枪"""
self.gun = gun_temp
def __str__(self):
if self.gun:
return "%s的血量为:%d, 他有枪 %s"%(self.name, self.hp, self.gun)
else:
return "%s的血量为%d, 他没有枪"%(self.name, self.hp)
class Gun(object):
"""枪类"""
def __init__(self, name):
super(Gun, self).__init__()
        self.name = name  # records the type/model of the gun
        self.danjia = None  # reference to the clip object installed in the gun
def baocun_danjia(self, dan_jia_temp):
"""用一个属性来保存这个弹夹对象的引用"""
self.danjia = dan_jia_temp
def __str__(self):
if self.danjia:
return "枪的信息为:%s, %s"%(self.name, self.danjia)
else:
return "枪的信息为:%s,这把枪中没有弹夹"%(self.name)
class Danjia(object):
"""弹夹类"""
def __init__(self, max_num):
super(Danjia, self).__init__()
        self.max_num = max_num  # maximum capacity of the clip
        self.zidan_list = []  # references to all bullets stored in the clip
def baocun_zidan(self, zi_dan_temp):
"""将这颗子弹保存"""
self.zidan_list.append(zi_dan_temp)
def __str__(self):
return "弹夹的信息为:%d/%d"%(len(self.zidan_list), self.max_num)
class Zidan(object):
"""子弹类"""
def __init__(self, sha_shang_li):
super(Zidan, self).__init__()
        self.sha_shang_li = sha_shang_li  # damage dealt by this bullet
def main():
"""用来控制整个程序的流程"""
#1. 创建老王对象
laowang = Person("老王")
#2. 创建一个枪对象
ak47 = Gun("AK47")
#3. 创建一个弹夹对象
dan_jia = Danjia(20)
#4. 创建一些子弹
for i in range(15):
zi_dan = Zidan(10)
#5. 老王把子弹安装到弹夹中
#老王.安装子弹到弹夹中(弹夹,子弹)
laowang.anzhuang_zidan(dan_jia, zi_dan)
#6. 老王把弹夹安装到枪中
#老王.安装弹夹到枪中(枪,弹夹)
laowang.anzhuang_danjia(ak47, dan_jia)
#test:测试弹夹的信息
#print(dan_jia)
#test:测试枪的信息
#print(ak47)
#7. 老王拿枪
#老王.拿枪(枪)
laowang.naqiang(ak47)
#test:测试老王对象
print(laowang)
#8. 创建一个敌人
gebi_laosong = Person("隔壁老宋")
print(gebi_laosong)
#9. 老王开枪打敌人
if __name__ == '__main__':
main()
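# A hypothetical continuation of step 9 (not part of this tutorial step): Person
# could gain a kaiqiang(diren) method that fires self.gun and subtracts the
# bullet's sha_shang_li from diren.hp.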
|
gpl-2.0
| 8,752,744,205,176,716,000 | 17.552632 | 60 | 0.62772 | false | 1.534107 | false | false | false |
ThibaultReuille/graphiti
|
Scripts/console/edition.py
|
1
|
5823
|
import script
from script import *
class Info(script.Script):
def run(self, args):
self.console.log("{0} nodes, {1} edges.".format(og.count_nodes(), og.count_edges()))
class Load(script.Script):
def run(self, args):
if len(args) < 2:
self.console.log("Usage: {0} <filename>".format(args[0]))
return
std.load_json(" ".join(args[1:]))
class Save(script.Script):
def run(self, args):
if len(args) != 2:
self.console.log("Usage: {0} <filename>".format(args[0]))
return
if os.path.isfile(args[1]):
self.console.log("Error: File already exists!")
return
std.save_json(args[1])
self.console.log("File saved in '{0}'.".format(args[1]))
class Screenshot(script.Script):
def run(self, args):
if len(args) != 2 and len(args) != 3:
self.console.log("Usage: {0} <filename> [<factor>]".format(args[0]))
return
if os.path.isfile(args[1]):
self.console.log("Error: File {0} already exists!".format(args[1]))
return
filename = args[1]
try:
factor = float(args[2])
except:
factor = 1.0
if not filename.lower().endswith('.tga'):
self.console.log("Extension not recognized, needs to be TGA")
return
og.screenshot(filename, factor)
self.console.log("Screenshot with factor {0} saved in '{1}'.".format(factor, filename))
class Clear(script.Script):
def clear_graph(self):
for id in og.get_node_ids():
og.remove_node(id)
def clear_colors(self):
og.set_attribute("og:space:edgemode", "string", "node_color")
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:color", "vec4", "1.0 1.0 1.0 1.0")
def clear_icons(self):
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:icon", "string", "shapes/disk")
def clear_activity(self):
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:activity", "float", "0.0")
for e in og.get_edge_ids():
og.set_edge_attribute(e, "og:space:activity", "float", "0.0")
def clear_lod(self):
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:lod", "float", "1.0")
for e in og.get_edge_ids():
og.set_edge_attribute(e, "og:space:lod", "float", "1.0")
def run(self, args):
if len(args) == 2 and args[1] == "graph":
self.clear_graph()
elif len(args) == 2 and args[1] == "colors":
self.clear_colors()
elif len(args) == 2 and args[1] == "icons":
self.clear_icons()
elif len(args) == 2 and args[1] == "activity":
self.clear_activity()
elif len(args) == 2 and args[1] == "lod":
self.clear_lod()
else:
self.console.log("Usage: {0} [graph|colors|icons|activity|lod]".format(args[0]))
class Set(script.Script):
def __init__(self, console):
super(Set, self).__init__(console)
def run(self, args):
if len(args) < 3:
self.console.log("Usage: {0} <type> <name> <value>".format(args[0]))
return
for key in self.console.query.keys():
entity_type = key[:-1] # TODO : Hack!! find a better way to do this. This removes the ending 's'
for entity_id in self.console.query[key]:
self.console.api.set_attribute(entity_type, entity_id, args[2], args[1], " ".join(args[3:]))
class Get(script.Script):
def __init__(self, console):
super(Get, self).__init__(console)
def run(self, args):
if len(args) < 2:
self.console.log("Usage: {0} <name>".format(args[0]))
return
for key in self.console.query.keys():
entity_type = key[:-1] # TODO : Hack!! find a better way to do this. This removes the ending 's'
result = dict()
for entity_id in self.console.query[key]:
result[entity_id] = self.console.api.get_attribute(entity_type, entity_id, args[1])
self.console.log("{0}: {1}".format(key, json.dumps(result)))
class Remove(script.Script):
def __init__(self, console):
super(Remove, self).__init__(console)
def run(self, args):
if 'edges' in self.console.query:
[ og.remove_edge(eid) for eid in self.console.query['edges'] ]
if 'nodes' in self.console.query:
[ og.remove_node(nid) for nid in self.console.query['nodes'] ]
class Map(script.Script):
def __init__(self, console):
super(Map, self).__init__(console)
def attr_convert(self, src_type, src_value, dst_type, options):
if src_type != dst_type:
raise Exception("Mapping from {0} to {1} not supported!".format(src_type, dst_type))
if dst_type == "vec2":
return std.vec2_to_str(src_value)
elif dst_type == "vec3":
return std.vec3_to_str(value)
elif dst_type == "vec4":
return std.vec4_to_str(value)
else:
if len(options) == 2 and options[0] == "--format":
value = options[1].format(src_value)
return value
else:
return "{0}".format(src_value)
def lambda_map(self, element_type, element_id, src_type, src_name, dst_type, dst_name, options = None):
if element_type == "node":
source = og.get_node_attribute(element_id, src_name)
target = self.attr_convert(src_type, source, dst_type, options)
self.console.log("og.set_node_attribute({0}, {1}, {2}, {3})".format(element_id, dst_name, dst_type, target))
og.set_node_attribute(element_id, dst_name, dst_type, target)
elif element_type == "edge":
source = og.get_edge_attribute(element_id, src_name)
target = self.attr_convert(src_type, source, dst_type, options)
og.set_edge_attribute(element_id, dst_name, dst_type, target)
def run(self, args):
        if len(args) < 6 or args[3] != 'to':
self.console.log("Usage: {0} <src type> <src attribute> to <dst type> <dst attribute> [options]".format(args[0]))
return
if 'nodes' in self.console.query:
for nid in self.console.query['nodes']:
self.lambda_map("node", nid, args[1], args[2], args[4], args[5], args[6:])
if 'edges' in self.console.query:
for eid in self.console.query['edges']:
self.lambda_map("edge", eid, args[1], args[2], args[4], args[5], args[6:])
|
bsd-2-clause
| -4,776,052,747,866,240,000 | 32.471264 | 116 | 0.638502 | false | 2.738946 | false | false | false |
nanshihui/PocCollect
|
component/fast_cgi/fast_cgi.py
|
1
|
2068
|
#!/usr/bin/env python
# encoding: utf-8
from t import T
import socket
class P(T):
def __init__(self):
T.__init__(self)
def verify(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
timeout=3
result = {}
result['result']=False
target_url='http://'+ip+':'+port
socket.setdefaulttimeout(timeout)
client_socket=None
        # Check whether the target leaks local files via FastCGI
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect((ip, 9000))
data = """
01 01 00 01 00 08 00 00 00 01 00 00 00 00 00 00
01 04 00 01 00 8f 01 00 0e 03 52 45 51 55 45 53
54 5f 4d 45 54 48 4f 44 47 45 54 0f 08 53 45 52
56 45 52 5f 50 52 4f 54 4f 43 4f 4c 48 54 54 50
2f 31 2e 31 0d 01 44 4f 43 55 4d 45 4e 54 5f 52
4f 4f 54 2f 0b 09 52 45 4d 4f 54 45 5f 41 44 44
52 31 32 37 2e 30 2e 30 2e 31 0f 0b 53 43 52 49
50 54 5f 46 49 4c 45 4e 41 4d 45 2f 65 74 63 2f
70 61 73 73 77 64 0f 10 53 45 52 56 45 52 5f 53
4f 46 54 57 41 52 45 67 6f 20 2f 20 66 63 67 69
63 6c 69 65 6e 74 20 00 01 04 00 01 00 00 00 00
"""
data_s = ''
for _ in data.split():
data_s += chr(int(_, 16))
client_socket.send(data_s)
ret = client_socket.recv(1024)
if ret.find(':root:') > 0:
result['result']=True
result['VerifyInfo'] = {}
result['VerifyInfo']['type']='fast-cgi Vulnerability'
result['VerifyInfo']['URL'] =target_url
result['VerifyInfo']['payload']=data_s
result['VerifyInfo']['result'] =ret
except:
pass
finally:
if client_socket is not None:
client_socket.close()
return result
if __name__ == '__main__':
print P().verify(ip='58.220.22.101',port='80')
|
mit
| 5,356,842,197,419,762,000 | 28.414286 | 93 | 0.499514 | false | 3.542169 | false | false | false |
klmitch/python-keystoneclient
|
keystoneclient/contrib/auth/v3/oidc.py
|
1
|
8795
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from positional import positional
from keystoneclient import access
from keystoneclient.auth.identity.v3 import federated
class OidcPassword(federated.FederatedBaseAuth):
"""Implement authentication plugin for OpenID Connect protocol.
OIDC or OpenID Connect is a protocol for federated authentication.
The OpenID Connect specification can be found at::
``http://openid.net/specs/openid-connect-core-1_0.html``
"""
@classmethod
def get_options(cls):
options = super(OidcPassword, cls).get_options()
options.extend([
cfg.StrOpt('username', help='Username'),
cfg.StrOpt('password', secret=True, help='Password'),
cfg.StrOpt('client-id', help='OAuth 2.0 Client ID'),
cfg.StrOpt('client-secret', secret=True,
help='OAuth 2.0 Client Secret'),
cfg.StrOpt('access-token-endpoint',
help='OpenID Connect Provider Token Endpoint'),
cfg.StrOpt('scope', default="profile",
help='OpenID Connect scope that is requested from OP')
])
return options
@positional(4)
def __init__(self, auth_url, identity_provider, protocol,
username, password, client_id, client_secret,
access_token_endpoint, scope='profile',
grant_type='password'):
"""The OpenID Connect plugin expects the following:
:param auth_url: URL of the Identity Service
:type auth_url: string
:param identity_provider: Name of the Identity Provider the client
will authenticate against
:type identity_provider: string
:param protocol: Protocol name as configured in keystone
:type protocol: string
:param username: Username used to authenticate
:type username: string
:param password: Password used to authenticate
:type password: string
:param client_id: OAuth 2.0 Client ID
:type client_id: string
:param client_secret: OAuth 2.0 Client Secret
:type client_secret: string
:param access_token_endpoint: OpenID Connect Provider Token Endpoint,
for example:
https://localhost:8020/oidc/OP/token
:type access_token_endpoint: string
:param scope: OpenID Connect scope that is requested from OP,
defaults to "profile", for example: "profile email"
:type scope: string
:param grant_type: OpenID Connect grant type, it represents the flow
that is used to talk to the OP. Valid values are:
"authorization_code", "refresh_token", or
"password".
:type grant_type: string
"""
super(OidcPassword, self).__init__(auth_url, identity_provider,
protocol)
self._username = username
self._password = password
self.client_id = client_id
self.client_secret = client_secret
self.access_token_endpoint = access_token_endpoint
self.scope = scope
self.grant_type = grant_type
@property
def username(self):
# Override to remove deprecation.
return self._username
@username.setter
def username(self, value):
# Override to remove deprecation.
self._username = value
@property
def password(self):
# Override to remove deprecation.
return self._password
@password.setter
def password(self, value):
# Override to remove deprecation.
self._password = value
def get_unscoped_auth_ref(self, session):
"""Authenticate with OpenID Connect and get back claims.
        This is a multi-step process. First, an access token must be retrieved;
        to do this, the username and password, the OpenID Connect client ID
        and secret, and the access token endpoint must be known.
        Second, we exchange the access token upon accessing the
        protected Keystone endpoint (federated auth URL). This will trigger
the OpenID Connect Provider to perform a user introspection and
retrieve information (specified in the scope) about the user in
the form of an OpenID Connect Claim. These claims will be sent
to Keystone in the form of environment variables.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:returns: a token data representation
:rtype: :py:class:`keystoneclient.access.AccessInfo`
"""
# get an access token
client_auth = (self.client_id, self.client_secret)
payload = {'grant_type': self.grant_type, 'username': self.username,
'password': self.password, 'scope': self.scope}
response = self._get_access_token(session, client_auth, payload,
self.access_token_endpoint)
access_token = response.json()['access_token']
# use access token against protected URL
headers = {'Authorization': 'Bearer ' + access_token}
response = self._get_keystone_token(session, headers,
self.federated_token_url)
# grab the unscoped token
token = response.headers['X-Subject-Token']
token_json = response.json()['token']
return access.AccessInfoV3(token, **token_json)
def _get_access_token(self, session, client_auth, payload,
access_token_endpoint):
"""Exchange a variety of user supplied values for an access token.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:param client_auth: a tuple representing client id and secret
:type client_auth: tuple
:param payload: a dict containing various OpenID Connect values, for
example::
{'grant_type': 'password', 'username': self.username,
'password': self.password, 'scope': self.scope}
:type payload: dict
:param access_token_endpoint: URL to use to get an access token, for
example: https://localhost/oidc/token
:type access_token_endpoint: string
"""
op_response = session.post(self.access_token_endpoint,
requests_auth=client_auth,
data=payload,
authenticated=False)
return op_response
def _get_keystone_token(self, session, headers, federated_token_url):
"""Exchange an acess token for a keystone token.
By Sending the access token in an `Authorization: Bearer` header, to
an OpenID Connect protected endpoint (Federated Token URL). The
OpenID Connect server will use the access token to look up information
about the authenticated user (this technique is called instrospection).
The output of the instrospection will be an OpenID Connect Claim, that
will be used against the mapping engine. Should the mapping engine
succeed, a Keystone token will be presented to the user.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:param headers: an Authorization header containing the access token.
        :type headers: dict
        :param federated_token_url: Protected URL for federated authentication,
for example: https://localhost:5000/v3/\
OS-FEDERATION/identity_providers/bluepages/\
protocols/oidc/auth
        :type federated_token_url: string
"""
auth_response = session.post(self.federated_token_url,
headers=headers,
authenticated=False)
return auth_response
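# Illustrative usage sketch (endpoint and credential values are assumptions, not
# part of this module):
#
#   from keystoneclient import session
#   auth = OidcPassword('https://keystone.example.org:5000/v3',
#                       identity_provider='myidp', protocol='oidc',
#                       username='user', password='secret',
#                       client_id='client', client_secret='clientsecret',
#                       access_token_endpoint='https://op.example.org/oidc/token')
#   sess = session.Session(auth=auth)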
|
apache-2.0
| -3,890,712,068,428,765 | 40.880952 | 79 | 0.61444 | false | 4.861802 | false | false | false |
Yarichi/Proyecto-DASI
|
Malmo/Python_Examples/mission_quit_command_example.py
|
1
|
4421
|
# ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
# Quit command example
import MalmoPython
import os
import sys
import time
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) # flush print output immediately
missionXML='''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Mission xmlns="http://ProjectMalmo.microsoft.com" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<About>
<Summary>If at first you don't succeed, give up.</Summary>
</About>
<ServerSection>
<ServerHandlers>
<FlatWorldGenerator generatorString="3;7,220*1,5*3,2;3;,biome_1"/>
<ServerQuitFromTimeUp timeLimitMs="30000"/>
<ServerQuitWhenAnyAgentFinishes/>
</ServerHandlers>
</ServerSection>
<AgentSection mode="Survival">
<Name>QuitBot</Name>
<AgentStart/>
<AgentHandlers>
<ObservationFromFullStats/>
<ContinuousMovementCommands turnSpeedDegs="180"/>
<ChatCommands />
<MissionQuitCommands quitDescription="give_up"/>
<RewardForMissionEnd>
<Reward description="give_up" reward="-1000"/>
</RewardForMissionEnd>
</AgentHandlers>
</AgentSection>
</Mission>'''
# Create default Malmo objects:
agent_host = MalmoPython.AgentHost()
try:
agent_host.parse( sys.argv )
except RuntimeError as e:
print 'ERROR:',e
print agent_host.getUsage()
exit(1)
if agent_host.receivedArgument("help"):
print agent_host.getUsage()
exit(0)
my_mission = MalmoPython.MissionSpec(missionXML, True)
my_mission_record = MalmoPython.MissionRecordSpec()
# Attempt to start a mission:
max_retries = 3
for retry in range(max_retries):
try:
agent_host.startMission( my_mission, my_mission_record )
break
except RuntimeError as e:
if retry == max_retries - 1:
print "Error starting mission:",e
exit(1)
else:
time.sleep(2)
# Loop until mission starts:
print "Waiting for the mission to start ",
world_state = agent_host.getWorldState()
while not world_state.has_mission_begun:
sys.stdout.write(".")
time.sleep(0.1)
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
print
print "Mission running ",
count = 0
# Loop until mission ends:
while world_state.is_mission_running:
sys.stdout.write(".")
time.sleep(0.5)
if count == 10:
print
print "Giving up!"
agent_host.sendCommand("quit")
count += 1
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
for reward in world_state.rewards:
print "Reward:",reward.getValue()
print
print "Mission ended"
# Mission has ended.
|
gpl-2.0
| -3,410,355,032,639,231,500 | 35.151261 | 117 | 0.598055 | false | 4.182592 | false | false | false |
ema/conpaas
|
conpaas-services/src/conpaas/services/htc/manager/configuration.py
|
1
|
4786
|
'''
Created on Jul 23, 2013
@author: Vlad
'''
import random
class Configuration:
def __init__(self,types_list, cost_list, limit_list):
self.keys = dict(zip(types_list, range(len(types_list))))
self.averages = {} # dictionary of averages with k as a machine type
self.rav = {} # dictionary of averages with k as a machine type
self.notasks = {} # dictionary of averages with k as a machine type
self.throughput = {} # dictionary of tasks with relevant time unit as k
self.conf = {}
self.costs = dict(zip(types_list,cost_list))
self.limits = dict(zip(types_list,limit_list))
self.ratios={}
for k in self.keys:
self.costs[self.keys[k]]=self.costs[k]
del self.costs[k]
self.limits[self.keys[k]]=self.limits[k]
del self.limits[k]
self.notasks[self.keys[k]] = 0
self.averages[self.keys[k]] = 0
self.rav[self.keys[k]] = 0
self.conf[self.keys[k]]= 0
random.seed()
self.conf_dict = {}
self.m = {}
def relevant_time_unit(self):
rk = random.choice(self.averages.keys())
t=60
self.throughput[rk] = round(t / self.averages[rk])
self.unit = t
for k in self.costs:
self.costs[k] *= float(self.unit)/3600
self.compute_throughput()
return self.unit
def compute_throughput(self):
for k in self.averages:
self.throughput[k] = round(self.unit / self.rav[k])
def set_average(self,m_type,value, count):
if self.keys[m_type] in self.averages.keys():
self.averages[self.keys[m_type]]=value
self.notasks[self.keys[m_type]] +=count
if m_type=='small':
self.rav[self.keys[m_type]]= value
if m_type=='medium':
self.rav[self.keys[m_type]]= value/4
if m_type=='large':
self.rav[self.keys[m_type]] = value/8
def compute_ratios(self):
for k in self.costs:
self.ratios[k] = round(self.costs[k]/self.throughput[k], 5 )
def compute_tmax(self):
tmax = 0
for k in self.throughput:
tmax += self.limits[k]*self.throughput[k]
return tmax
def cost_conf(self):
c = 0
for k in self.costs:
c += self.conf[k]*self.costs[k]
return c
def cheap_check(self,start,target):
cheap_list = self.costs.values()
sorted_list = sorted(self.costs.values())
cheap = 0
for p in sorted_list:
kp = cheap_list.index(p)
if start + self.throughput[kp] > target and kp in self.ratios.keys() :
self.conf[kp]+=1
cheap=1
break
return cheap
def compute_configuration(self, target):
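        # Reading of the algorithm (comment for clarity): greedily add machines of
        # the type with the best cost/throughput ratio until `target` is reached,
        # first checking via cheap_check whether one cheaper machine can close the
        # remaining gap.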
for k in self.averages:
self.conf[k]= 0
self.compute_ratios()
start = 0
while start < target and len(self.ratios)>0:
if self.cheap_check(start, target) ==1:
return self.conf
r = self.ratios.values()
m = min(r)
for km in self.ratios:
if self.ratios[km] == m:
break
while self.limits[km] > self.conf[km]:
start+=self.throughput[km]
self.conf[km]+=1
if start >= target:
return self.conf
if self.cheap_check(start, target) == 1:
return self.conf
del self.ratios[km]
return self.conf
def dynamic_configuration(self):
tmax = self.compute_tmax()
for k in self.limits:
self.conf[k]=self.limits[k]
t = tmax - 1
self.conf_dict = {}
self.conf_dict[tmax] = self.conf
self.m = {}
self.m[tmax] = self.cost_conf()
while t >= 0:
self.m[t]=self.m[t+1]
km = -1
for k in self.throughput:
if tmax - self.throughput[k] >= t:
if self.m[t] > self.m[t+self.throughput[k]] - self.costs[k] and self.conf_dict[t+self.throughput[k]][k]>0:
self.m[t] = self.m[t+self.throughput[k]] - self.costs[k]
km = k
if km > -1:
self.conf_dict[t] = self.conf_dict[t+self.throughput[km]].copy()
self.conf_dict[t][km] -= 1
else:
self.conf_dict[t] = self.conf_dict[t+1].copy()
t-=1
self.m[0]=0
return self.m
|
bsd-3-clause
| -8,171,769,272,402,431,000 | 32.236111 | 126 | 0.494985 | false | 3.563663 | false | false | false |
omerwe/LEAP
|
leapUtils.py
|
1
|
10998
|
import numpy as np
from optparse import OptionParser
import scipy.linalg as la
import scipy.stats as stats
import scipy.linalg.blas as blas
import pandas as pd
import csv
import time
import fastlmm.util.VertexCut as vc
from pysnptools.snpreader.bed import Bed
import pysnptools.util as pstutil
import pysnptools.util.pheno as phenoUtils
np.set_printoptions(precision=3, linewidth=200)
def loadData(bfile, extractSim, phenoFile, missingPhenotype='-9', loadSNPs=False, standardize=True):
bed = Bed(bfile, count_A1=True)
if (extractSim is not None):
f = open(extractSim)
csvReader = csv.reader(f)
extractSnpsSet = set([])
for l in csvReader: extractSnpsSet.add(l[0])
f.close()
keepSnpsInds = [i for i in range(bed.sid.shape[0]) if bed.sid[i] in extractSnpsSet]
bed = bed[:, keepSnpsInds]
phe = None
if (phenoFile is not None): bed, phe = loadPheno(bed, phenoFile, missingPhenotype)
if (loadSNPs):
bed = bed.read()
if (standardize): bed = bed.standardize()
return bed, phe
def loadPheno(bed, phenoFile, missingPhenotype='-9', keepDict=False):
pheno = phenoUtils.loadOnePhen(phenoFile, missing=missingPhenotype, vectorize=True)
checkIntersection(bed, pheno, 'phenotypes')
bed, pheno = pstutil.intersect_apply([bed, pheno])
if (not keepDict): pheno = pheno['vals']
return bed, pheno
def checkIntersection(bed, fileDict, fileStr, checkSuperSet=False):
bedSet = set((b[0], b[1]) for b in bed.iid)
fileSet = set((b[0], b[1]) for b in fileDict['iid'])
if checkSuperSet:
if (not fileSet.issuperset(bedSet)): raise Exception(fileStr + " file does not include all individuals in the bfile")
intersectSet = bedSet.intersection(fileSet)
if (len(intersectSet) != len (bedSet)):
print(len(intersectSet), 'individuals appear in both the plink file and the', fileStr, 'file')
def symmetrize(a):
return a + a.T - np.diag(a.diagonal())
def loadRelatedFile(bed, relFile):
relatedDict = phenoUtils.loadOnePhen(relFile, vectorize=True)
checkIntersection(bed, relatedDict, 'relatedness', checkSuperSet=True)
_, relatedDict = pstutil.intersect_apply([bed, relatedDict])
related = relatedDict['vals']
keepArr = (related < 0.5)
print(np.sum(~keepArr), 'individuals will be removed due to high relatedness')
return keepArr
def findRelated(bed, cutoff, kinshipFile=None):
if (kinshipFile is None):
print('Computing kinship matrix...')
t0 = time.time()
XXT = symmetrize(blas.dsyrk(1.0, bed.val, lower=1) / bed.val.shape[1])
print('Done in %0.2f'%(time.time()-t0), 'seconds')
else:
XXT = np.loadtxt(kinshipFile)
#Find related individuals
removeSet = set(np.sort(vc.VertexCut().work(XXT, cutoff))) #These are the indexes of the IIDs to remove
print('Marking', len(removeSet), 'individuals to be removed due to high relatedness')
#keepArr = np.array([(1 if iid in keepSet else 0) for iid in bed.iid], dtype=bool)
keepArr = np.ones(bed.iid.shape[0], dtype=bool)
for i in removeSet: keepArr[i] = False
return keepArr
def eigenDecompose(XXT, ignore_neig=False):
t0 = time.time()
print('Computing eigendecomposition...')
s,U = la.eigh(XXT)
if (not ignore_neig and (np.min(s) < -1e-4)): raise Exception('Negative eigenvalues found')
s[s<0]=0
ind = np.argsort(s)
ind = ind[s>1e-12]
U = U[:, ind]
s = s[ind]
print('Done in %0.2f'%(time.time()-t0), 'seconds')
return s,U
def loadCovars(bed, covarFile):
covarsDict = phenoUtils.loadPhen(covarFile)
checkIntersection(bed, covarsDict, 'covariates', checkSuperSet=True)
_, covarsDict = pstutil.intersect_apply([bed, covarsDict])
covar = covarsDict['vals']
return covar
def getSNPCovarsMatrix(bed, resfile, pthresh, mindist):
snpNameToNumDict = dict([])
for i,s in enumerate(bed.sid): snpNameToNumDict[s] = i
f = open(resfile)
csvReader = csv.reader(f, delimiter="\t")
next(csvReader)
significantSNPs = []
significantSNPNames = []
lastPval = 0
featuresPosList = []
for l in csvReader:
snpName, pVal = l[0], float(l[4])
if (pVal < lastPval): raise Exception('P-values are not sorted in descending order: ' + str(pVal) + ">" + str(lastPval))
lastPval = pVal
if (pVal > pthresh): break
if (snpName not in snpNameToNumDict): continue
significantSNPNames.append(snpName)
if (mindist == 0):
significantSNPs.append(snpNameToNumDict[snpName])
print('Using SNP', snpName, 'with p<%0.2e'%pVal, 'as a fixed effect')
else:
posArr = bed.pos[snpNameToNumDict[snpName]]
chrom, pos = posArr[0], int(posArr[2])
addSNP = True
for (c,p) in featuresPosList:
if (chrom == c and abs(pos-p) < mindist):
addSNP = False
break
if addSNP:
significantSNPs.append(snpNameToNumDict[snpName])
featuresPosList.append((chrom, pos))
print('Using SNP', snpName, '('+str(int(chrom))+':'+str(pos)+') with p<%0.2e'%pVal, 'as a fixed effect')
f.close()
snpCovarsMat = bed.val[:, significantSNPs]
return snpCovarsMat
def getExcludedChromosome(bfile, chrom):
bed = Bed(bfile, count_A1=True)
indsToKeep = (bed.pos[:,0] != chrom)
bed = bed[:, indsToKeep]
return bed.read().standardize()
def getChromosome(bfile, chrom):
bed = Bed(bfile, count_A1=True)
indsToKeep = (bed.pos[:,0] == chrom)
bed = bed[:, indsToKeep]
return bed.read().standardize()
def _fixupBedAndPheno(bed, pheno, missingPhenotype='-9'):
bed = _fixupBed(bed)
bed, pheno = _fixup_pheno(pheno, bed, missingPhenotype)
return bed, pheno
def _fixupBed(bed):
if isinstance(bed, str):
return Bed(bed, count_A1=True).read().standardize()
else: return bed
def _fixup_pheno(pheno, bed=None, missingPhenotype='-9'):
if (isinstance(pheno, str)):
if (bed is not None):
bed, pheno = loadPheno(bed, pheno, missingPhenotype, keepDict=True)
return bed, pheno
else:
phenoDict = phenoUtils.loadOnePhen(pheno, missing=missingPhenotype, vectorize=True)
return phenoDict
else:
if (bed is not None): return bed, pheno
else: return pheno
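# Note for clarity: since bed.val is standardized, X.T.dot(y)/n below is the per-SNP
# correlation r, which linreg converts to a t statistic via t = r*sqrt(df/(1-r^2))
# with df = n-2, and then to a two-sided p-value.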
def linreg(bed, pheno):
#Extract snps and phenotype
bed, pheno = _fixupBedAndPheno(bed, pheno)
if isinstance(pheno, dict): phe = pheno['vals']
else: phe = pheno
if (len(phe.shape)==2):
if (phe.shape[1]==1): phe=phe[:,0]
else: raise Exception('More than one phenotype found')
#Normalize y. We assume X is already normalized.
y = phe - phe.mean(); y /= y.std()
#Compute p-values
Xy = bed.val.T.dot(y) / y.shape[0]
Xy[Xy>1.0] = 1.0
Xy[Xy<-1.0] = -1.0
df = y.shape[0]-2
TINY = 1.0e-20
t = Xy * np.sqrt(df / ((1.0-Xy+TINY) * (1.0+Xy+TINY)))
pValT = stats.t.sf(np.abs(t), df)*2
#Create pandas data frame
items = [
('SNP', bed.sid),
('Chr', bed.pos[:,0]),
('GenDist', bed.pos[:,1]),
('ChrPos', bed.pos[:,2]),
('PValue', pValT),
]
frame = pd.DataFrame.from_items(items)
frame.sort("PValue", inplace=True)
frame.index = np.arange(len(frame))
return frame
def powerPlot(df, causalSNPs, title=''):
import pylab
causalSNPs = set(causalSNPs)
csnpPvals = df[df['SNP'].isin(causalSNPs)]["PValue"]
pvalPoints = np.logspace(-6, -2, num=1000)
power = [np.mean(csnpPvals < p ) for p in list(pvalPoints)]
pylab.plot(-np.log10(pvalPoints), power)
pylab.xlabel("-log10(Significance Threshold)")
pylab.ylabel("Power")
pylab.title(title)
def computeCovar(bed, shrinkMethod, fitIndividuals):
eigen = dict([])
if (shrinkMethod in ['lw', 'oas', 'l1', 'cv']):
import sklearn.covariance as cov
t0 = time.time()
print('Estimating shrunk covariance using', shrinkMethod, 'estimator...')
if (shrinkMethod == 'lw'): covEstimator = cov.LedoitWolf(assume_centered=True, block_size = 5*bed.val.shape[0])
elif (shrinkMethod == 'oas'): covEstimator = cov.OAS(assume_centered=True)
elif (shrinkMethod == 'l1'): covEstimator = cov.GraphLassoCV(assume_centered=True, verbose=True)
elif (shrinkMethod == 'cv'):
			import sklearn.grid_search
			shrunkEstimator = cov.ShrunkCovariance(assume_centered=True)
param_grid = {'shrinkage': [0.01, 0.1, 0.3, 0.5, 0.7, 0.9, 0.99]}
covEstimator = sklearn.grid_search.GridSearchCV(shrunkEstimator, param_grid)
else: raise Exception('unknown covariance regularizer')
covEstimator.fit(bed.val[fitIndividuals, :].T)
if (shrinkMethod == 'l1'):
alpha = covEstimator.alpha_
print('l1 alpha chosen:', alpha)
covEstimator2 = cov.GraphLasso(alpha=alpha, assume_centered=True, verbose=True)
else:
			if (shrinkMethod == 'cv'): shrinkEstimator = covEstimator.best_params_['shrinkage']
else: shrinkEstimator = covEstimator.shrinkage_
print('shrinkage estimator:', shrinkEstimator)
covEstimator2 = cov.ShrunkCovariance(shrinkage=shrinkEstimator, assume_centered=True)
covEstimator2.fit(bed.val.T)
XXT = covEstimator2.covariance_ * bed.val.shape[1]
print('Done in %0.2f'%(time.time()-t0), 'seconds')
else:
print('Computing kinship matrix...')
t0 = time.time()
XXT = symmetrize(blas.dsyrk(1.0, bed.val, lower=1))
print('Done in %0.2f'%(time.time()-t0), 'seconds')
try: shrinkParam = float(shrinkMethod)
except: shrinkParam = -1
if (shrinkMethod == 'mylw'):
XXT_fit = XXT[np.ix_(fitIndividuals, fitIndividuals)]
sE2R = (np.sum(XXT_fit**2) - np.sum(np.diag(XXT_fit)**2)) / (bed.val.shape[1]**2)
#temp = (bed.val**2).dot((bed.val.T)**2)
temp = symmetrize(blas.dsyrk(1.0, bed.val[fitIndividuals, :]**2, lower=1))
sER2 = (temp.sum() - np.diag(temp).sum()) / bed.val.shape[1]
shrinkParam = (sER2 - sE2R) / (sE2R * (bed.val.shape[1]-1))
if (shrinkParam > 0):
print('shrinkage estimator:', 1-shrinkParam)
XXT = (1-shrinkParam)*XXT + bed.val.shape[1]*shrinkParam*np.eye(XXT.shape[0])
return XXT
def standardize(X, method, optionsDict):
fitIndividuals = np.ones(X.shape[0], dtype=np.bool)
if (method == 'frq'):
empMean = X.mean(axis=0) / 2.0
X[:, empMean>0.5] = 2 - X[:, empMean>0.5]
print('regularizng SNPs according to frq file...')
frqFile = (optionsDict['bfilesim']+'.frq' if (optionsDict['frq'] is None) else optionsDict['frq'])
mafs = np.loadtxt(frqFile, usecols=[1,2]).mean(axis=1)
snpsMean = 2*mafs
snpsStd = np.sqrt(2*mafs*(1-mafs))
elif (method == 'related'):
if (optionsDict['related'] is None): raise Exception('related file not supplied')
print('regularizng SNPs according to non-related individuals...')
relLines = np.loadtxt(optionsDict['related'], usecols=[2])
keepArr = (relLines != 1)
print('Excluding', np.sum(~keepArr), 'from the covariance matrix standardization')
snpsMean = X[keepArr, :].mean(axis=0)
snpsStd = X[keepArr, :].std(axis=0)
fitIndividuals = keepArr
elif (method == 'controls'):
phe = optionsDict['pheno']
pheThreshold = phe.mean()
controls = (phe<pheThreshold)
print('regularizng SNPs according to controls...')
snpsMean = X[controls, :].mean(axis=0)
snpsStd = X[controls, :].std(axis=0)
fitIndividuals = controls
elif (method is None):
snpsMean = X.mean(axis=0)
snpsStd = X.std(axis=0)
else:
raise Exception('unknown SNP standardization option: ' + method)
	X -= snpsMean
X /= snpsStd
return X, fitIndividuals
|
apache-2.0
| 3,903,449,392,061,788,700 | 32.530488 | 122 | 0.68367 | false | 2.639942 | false | false | false |
puruckertom/ubertool
|
ubertool/varroapop/varroapop_functions.py
|
1
|
11627
|
from __future__ import division #brings in Python 3.0 mixed type calculation rules
import logging
import json
import requests
import math
import pandas as pd
import os
rest_url_varroapop = os.environ.get('OPENCPU_REST_SERVER')
#rest_url_varroapop = 'http://localhost'
if not os.environ.get('OPENCPU_REST_SERVER'):
rest_url_varroapop = 'http://172.20.100.18:5656'
class VarroapopFunctions(object):
"""
Function class for Stir.
"""
def __init__(self):
"""Class representing the functions for VarroaPop"""
super(VarroapopFunctions, self).__init__()
def call_varroapop_api(self):
logging.info("=========== formatting Varroapop JSON payload")
input_json = self.format_varroapop_payload()
logging.info("=========== calling Varroapop windows REST API")
called_endpoint = (rest_url_varroapop + '/ocpu/apps/quanted/VarroaPopWrapper/R/RunVarroaPop/json')
logging.info(called_endpoint)
http_headers = {'Content-Type': 'application/json'}
logging.info("JSON payload:")
print(input_json)
return requests.post(called_endpoint, headers=http_headers, data=input_json, timeout=60)
def fill_model_out_attr(self, output_json):
outputs = json.loads(json.loads(output_json)[0])
self.out_date = self.out_date.append(pd.Series(outputs.get('Date')))
self.out_colony_size = self.out_colony_size.append(pd.Series(outputs.get('Colony.Size')))
self.out_adult_drones = self.out_adult_drones.append(pd.Series(outputs.get('Adult.Drones')))
self.out_adult_workers = self.out_adult_workers.append(pd.Series(outputs.get('Adult.Workers')))
self.out_foragers = self.out_foragers.append(pd.Series(outputs.get('Foragers')))
self.out_capped_drone_brood = self.out_capped_drone_brood.append(pd.Series(outputs.get('Capped.Drone.Brood')))
self.out_capped_worker_brood = self.out_capped_worker_brood.append(pd.Series(outputs.get('Capped.Worker.Brood')))
self.out_drone_larvae = self.out_drone_larvae.append(pd.Series(outputs.get('Drone.Larvae')))
self.out_worker_larvae =self.out_worker_larvae.append(pd.Series(outputs.get('Worker.Larvae')))
self.out_drone_eggs = self.out_drone_eggs.append(pd.Series(outputs.get('Drone.Eggs')))
self.out_worker_eggs = self.out_worker_eggs.append(pd.Series(outputs.get('Worker.Eggs')))
self.out_free_mites = self.out_free_mites.append(pd.Series(outputs.get('Free.Mites')))
self.out_drone_brood_mites =self.out_drone_brood_mites.append(pd.Series(outputs.get('Drone.Brood.Mites')))
self.out_worker_brood_mites =self.out_worker_brood_mites.append(pd.Series(outputs.get('Worker.Brood.Mites')))
self.out_drone_mites_per_cell = self.out_drone_mites_per_cell.append(pd.Series(outputs.get('Mites.Drone.Cell')))
self.out_worker_mites_per_cell = self.out_worker_mites_per_cell.append(pd.Series(outputs.get('Mites.Worker.Cell')))
self.out_mites_dying = self.out_mites_dying.append(pd.Series(outputs.get('Mites.Dying')))
self.out_proportion_mites_dying =self.out_proportion_mites_dying.append(pd.Series(outputs.get('Proportion.Mites.Dying')))
self.out_colony_pollen = self.out_colony_pollen.append(pd.Series(outputs.get('Colony.Pollen..g.')))
self.out_chemical_conc_pollen =self.out_chemical_conc_pollen.append(pd.Series(outputs.get('Pollen.Pesticide.Concentration')))
self.out_colony_nectar = self.out_colony_nectar.append(pd.Series(outputs.get('Colony.Nectar')))
self.out_chemical_conc_nectar =self.out_chemical_conc_nectar.append(pd.Series(outputs.get('Nectar.Pesticide.Concentration')))
self.out_dead_drone_larvae = self.out_dead_drone_larvae.append(pd.Series(outputs.get('Dead.Drone.Larvae')))
self.out_dead_worker_larvae =self.out_dead_worker_larvae.append(pd.Series(outputs.get('Dead.Worker.Larvae')))
self.out_dead_drone_adults = self.out_dead_drone_adults.append(pd.Series(outputs.get('Dead.Drone.Adults')))
self.out_dead_worker_adults = self.out_dead_worker_adults.append(pd.Series(outputs.get('Dead.Worker.Adults')))
self.out_dead_foragers = self.out_dead_foragers.append(pd.Series(outputs.get('Dead.Foragers')))
self.out_queen_strength = self.out_queen_strength.append(pd.Series(outputs.get('Queen.Strength')))
self.out_average_temp_c = self.out_average_temp_c.append(pd.Series(outputs.get('Average.Temperature..celsius.')))
self.out_rain_inch = self.out_rain_inch.append(pd.Series(outputs.get('Rain')))
def fill_summary_stats(self):
self.out_mean_colony_size = self.out_mean_colony_size.append(pd.Series(self.out_colony_size.mean()))
self.out_max_colony_size = self.out_max_colony_size.append(pd.Series(self.out_colony_size.max()))
self.out_min_colony_size = self.out_min_colony_size.append(pd.Series(self.out_colony_size.min()))
self.out_total_bee_mortality = self.out_total_bee_mortality.append(pd.Series(sum([self.out_dead_drone_adults.sum(),
self.out_dead_drone_larvae.sum(),
self.out_dead_worker_adults.sum(),
self.out_dead_worker_larvae.sum(),
self.out_dead_foragers.sum()])))
self.out_max_chemical_conc_pollen = self.out_max_chemical_conc_pollen.append(pd.Series(self.out_chemical_conc_pollen.max()))
self.out_max_chemical_conc_nectar = self.out_max_chemical_conc_nectar.append(pd.Series(self.out_chemical_conc_nectar.max()))
def fill_sessionid(self, sessionid):
self.out_api_sessionid = self.out_api_sessionid.append(pd.Series(sessionid))
def format_varroapop_payload(self):
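        # Comment for clarity: builds the JSON body for the VarroaPopWrapper OpenCPU
        # endpoint -- a flat dict of VarroaPop parameters (dates collapsed to
        # "m/d/y" strings, inputs renamed and filtered below) plus the weather file.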
input_dict = self.pd_obj.to_dict('records')[0]
weather_loc = input_dict.pop('weather_location')
print('Weather location: '+ weather_loc )
input_dict = self.collapse_dates(input_dict)
input_dict = self.rename_inputs(input_dict)
input_dict = self.remove_unused_inputs(input_dict)
data = json.dumps({'parameters':input_dict, 'weather_file':weather_loc})
return data
def collapse_dates(self, input_dict):
sim_start_keys = ['SimStart_month', 'SimStart_day', 'SimStart_year']
input_dict['SimStart'] = "/".join([str(int(input_dict.get(key))) for key in sim_start_keys])
sim_end_keys = ['SimEnd_month', 'SimEnd_day', 'SimEnd_year']
input_dict['SimEnd'] = "/".join([str(int(input_dict.get(key))) for key in sim_end_keys])
requeen_date_keys = ['RQReQueenDate_month', 'RQReQueenDate_day', 'RQReQueenDate_year']
input_dict['RQReQueenDate'] = "/".join([str(int(input_dict.get(key))) for key in requeen_date_keys])
imm_start_keys = ['ImmStart_month', 'ImmStart_day', 'ImmStart_year']
input_dict['ImmStart'] = "/".join([str(int(input_dict.get(key))) for key in imm_start_keys])
imm_end_keys = ['ImmEnd_month', 'ImmEnd_day', 'ImmEnd_year']
input_dict['ImmEnd'] = "/".join([str(int(input_dict.get(key))) for key in imm_end_keys])
vt_treatment_start_keys = ['VTTreatmentStart_month', 'VTTreatmentStart_day', 'VTTreatmentStart_year']
input_dict['VTTreatmentStart'] = "/".join([str(int(input_dict.get(key))) for key in vt_treatment_start_keys])
foliar_app_date_keys = ['FoliarAppDate_month', 'FoliarAppDate_day', 'FoliarAppDate_year']
input_dict['FoliarAppDate'] = "/".join([str(int(input_dict.get(key))) for key in foliar_app_date_keys])
foliar_forage_begin_keys = ['FoliarForageBegin_month', 'FoliarForageBegin_day', 'FoliarForageBegin_year']
input_dict['FoliarForageBegin'] = "/".join([str(int(input_dict.get(key))) for key in foliar_forage_begin_keys])
foliar_forage_end_keys = ['FoliarForageEnd_month', 'FoliarForageEnd_day', 'FoliarForageEnd_year']
input_dict['FoliarForageEnd'] = "/".join([str(int(input_dict.get(key))) for key in foliar_forage_end_keys])
soil_forage_begin_keys = ['SoilForageBegin_month', 'SoilForageBegin_day', 'SoilForageBegin_year']
input_dict['SoilForageBegin'] = "/".join([str(int(input_dict.get(key))) for key in soil_forage_begin_keys])
soil_forage_end_keys = ['SoilForageEnd_month', 'SoilForageEnd_day', 'SoilForageEnd_year']
input_dict['SoilForageEnd'] = "/".join([str(int(input_dict.get(key))) for key in soil_forage_end_keys])
seed_forage_begin_keys = ['SeedForageBegin_month', 'SeedForageBegin_day', 'SeedForageBegin_year']
input_dict['SeedForageBegin'] = "/".join([str(int(input_dict.get(key))) for key in seed_forage_begin_keys])
seed_forage_end_keys = ['SeedForageEnd_month', 'SeedForageEnd_day', 'SeedForageEnd_year']
input_dict['SeedForageEnd'] = "/".join([str(int(input_dict.get(key))) for key in seed_forage_end_keys])
sup_pollen_begin_keys = ['SupPollenBegin_month', 'SupPollenBegin_day', 'SupPollenBegin_year']
input_dict['SupPollenBegin'] = "/".join([str(int(input_dict.get(key))) for key in sup_pollen_begin_keys])
sup_pollen_end_keys = ['SupPollenEnd_month', 'SupPollenEnd_day', 'SupPollenEnd_year']
input_dict['SupPollenEnd'] = "/".join([str(int(input_dict.get(key))) for key in sup_pollen_end_keys])
sup_nectar_begin_keys = ['SupNectarBegin_month', 'SupNectarBegin_day', 'SupNectarBegin_year']
input_dict['SupNectarBegin'] = "/".join([str(int(input_dict.get(key))) for key in sup_nectar_begin_keys])
sup_nectar_end_keys = ['SupNectarEnd_month', 'SupNectarEnd_day', 'SupNectarEnd_year']
input_dict['SupNectarEnd'] = "/".join([str(int(input_dict.get(key))) for key in sup_nectar_end_keys])
inputs_to_remove = sum([sim_start_keys,sim_end_keys,requeen_date_keys,imm_start_keys,
imm_end_keys,vt_treatment_start_keys,foliar_app_date_keys,
foliar_forage_begin_keys, foliar_forage_end_keys,soil_forage_begin_keys,
soil_forage_end_keys, seed_forage_begin_keys, seed_forage_end_keys,
sup_pollen_begin_keys, sup_pollen_end_keys, sup_nectar_begin_keys, sup_nectar_end_keys], [])
[input_dict.pop(k, None) for k in inputs_to_remove]
return input_dict
def rename_inputs(self, input_dict):
input_dict['EAppRate'] = input_dict.pop('ar_lb')
input_dict['AIKOW'] = math.exp(input_dict.pop('l_kow'))
input_dict['AIKOC'] = input_dict.pop('k_oc')
return input_dict
def remove_unused_inputs(self, input_dict):
keys = list(input_dict.keys())
to_remove = [i for i in keys if i[0].islower()]
for k in to_remove:
input_dict.pop(k, None)
return input_dict
def get_input_file(self, api_sessionid):
file_endpoint = (rest_url_varroapop + '/ocpu/tmp/' + api_sessionid + '/files/')
return requests.get(file_endpoint+'vp_input.txt')
def get_log_file(self, api_sessionid):
file_endpoint = (rest_url_varroapop + '/ocpu/tmp/' + api_sessionid + '/files/')
return requests.get(file_endpoint+'vp_log.txt')
def get_results_file(self, api_sessionid):
file_endpoint = (rest_url_varroapop + '/ocpu/tmp/' + api_sessionid + '/files/')
return requests.get(file_endpoint+'vp_results.txt')
|
unlicense
| -3,814,458,858,010,616,000 | 65.821839 | 133 | 0.649609 | false | 3.002066 | false | false | false |
TravelModellingGroup/TMGToolbox
|
TMGToolbox/src/network_editing/NCS11/conversion/convert_VDFs.py
|
1
|
8469
|
'''
Copyright 2014 Travel Modelling Group, Department of Civil Engineering, University of Toronto
This file is part of the TMG Toolbox.
The TMG Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The TMG Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the TMG Toolbox. If not, see <http://www.gnu.org/licenses/>.
'''
#---METADATA---------------------
'''
Convert VDFs
Authors: Michael Hain
Latest revision by: Peter Kucirek
Converts VDF indices from DMG2001 to NCS11
'''
#---VERSION HISTORY
'''
0.1.0 Created by Michael Hain
0.2.0 Updated by Peter Kucirek
'''
import inro.modeller as _m
import traceback as _traceback
from contextlib import contextmanager
from contextlib import nested
_util = _m.Modeller().module('tmg.common.utilities')
_tmgTPB = _m.Modeller().module('tmg.common.TMG_tool_page_builder')
##########################################################################################################
class ConvertVDFs(_m.Tool()):
version = '0.2.0'
tool_run_msg = ""
#---Special instance types
scenario = _m.Attribute(_m.InstanceType) #
makeChangesPermanent = _m.Attribute(bool) #
def __init__(self):
self.networkCalculator = _m.Modeller().tool("inro.emme.network_calculation.network_calculator")
def page(self):
pb = _m.ToolPageBuilder(self, title="Convert VDFs v%s" %self.version,
description="Converts link classification types (stored as VDF ids) from \
DMG2001 to NCS11.",
branding_text="- TMG Toolbox")
if self.tool_run_msg != "": # to display messages in the page
pb.tool_run_status(self.tool_run_msg_status)
pb.add_select_scenario(tool_attribute_name="scenario",
title="Select a scenario",
allow_none=False)
pb.add_checkbox(tool_attribute_name="makeChangesPermanent",
title="Make changes permanent?",
note="If unchecked, new VDF values will be stored in link extra attribute '@vdf'.")
return pb.render()
##########################################################################################################
def run(self):
self.tool_run_msg = ""
if self.makeChangesPermanent is None: # Fix the checkbox problem
self.makeChangesPermanent = False;
try:
self._execute()
except Exception as e:
self.tool_run_msg = _m.PageBuilder.format_exception(
e, _traceback.format_exc())
raise
self.tool_run_msg = _m.PageBuilder.format_info("Tool complete.")
##########################################################################################################
def _execute(self):
with _m.logbook_trace(name="Convert Link VDFs v%s" %self.version,
attributes=self._getAtts()):
with self._vdfAttributeMANAGER() as self.attr:
with _m.logbook_trace("Calculating new VDFs into attribute %s" %self.attr.id):
self._assignVDFToLinkSubSet(11, "vdf=11,12") # Urban freeways
self._assignVDFToLinkSubSet(12, "vdf=13,14") # Freeways
self._assignVDFToLinkSubSet(13, "vdf=21,24") # Freeway ramps
self._assignVDFToLinkSubSet(14, "vdf=15") # Tolled freeways
self._assignVDFToLinkSubSet(15, "vdf=25") # Tolled freeway ramps
self._assignVDFToLinkSubSet(16, "vdf=99 and not length=0 and ul2=100,9999") # Freeways HOV lanes
self._assignVDFToLinkSubSet(17, "vdf=99 and length=0,0.1 and ul2=100,9999") # Freeway HOV ramps
self._assignVDFToLinkSubSet(20, "vdf=30,39 and lanes=1 and ul3=600,9999") # Two-lane rural roads
self._assignVDFToLinkSubSet(21, "vdf=30,39 and lanes=2,99 and ul3=600,9999") # Multi-lane rural roads
self._assignVDFToLinkSubSet(22, "vdf=30,39 and ul3=0,599")
self._assignVDFToLinkSubSet(22, "type=217,219 or type=224 or type=325 or type=537 or type=700,999 and vdf=40,49")
self._assignVDFToLinkSubSet(22, "type=217,219 or type=224 or type=325 or type=537 or type=700,999 and vdf=60,69")
self._assignVDFToLinkSubSet(30, "vdf=40,49 and %s=0" %self.attr.id) # Assign only to links which have not already been assigned.
self._assignVDFToLinkSubSet(30, "vdf=30,39 and type=0,112")
self._assignVDFToLinkSubSet(40, "vdf=50,59 and ul3=700,9999")
self._assignVDFToLinkSubSet(41, "vdf=99 and ul2=0,99")
self._assignVDFToLinkSubSet(42, "vdf=50,59 and ul3=0,699")
self._assignVDFToLinkSubSet(50, "vdf=60,69 and %s=0 and lanes=2,99 and ul3=401,9999" %self.attr.id)
self._assignVDFToLinkSubSet(51, "lanes=1 or ul3=0,400 and vdf=60,69 and %s=0" %self.attr.id)
self._assignVDFToLinkSubSet(51, "type=538 and vdf=64")
self._assignVDFToLinkSubSet(90, "vdf=90") #Centroid connectors
if self.makeChangesPermanent:
with _m.logbook_trace("Copying new VDF values into network"):
self._copyAttributeToVDF()
##########################################################################################################
#----CONTEXT MANAGERS---------------------------------------------------------------------------------
'''
Context managers for temporary database modifications.
'''
@contextmanager
def _vdfAttributeMANAGER(self):
#Code here is executed upon entry
att = None
att = self.scenario.extra_attribute("@vdf")
if att is None:
att = self.scenario.create_extra_attribute('LINK', '@vdf', default_value=0)
_m.logbook_write("Created temporary link '%s' attribute to store new VDFs." %att.id)
else:
att.initialize()
_m.logbook_write("Initialized attribute '%s'." %att.id)
try:
yield att
finally:
# Code here is executed in all cases.
if self.makeChangesPermanent:
i = att.id
self.scenario.delete_extra_attribute(att)
_m.logbook_write("Deleted temporary link attribute '%s'" %i)
else:
_m.logbook_write("Temporary link attribute '%s' made permanent." %att.id)
#----SUB FUNCTIONS---------------------------------------------------------------------------------
def _getAtts(self):
atts = {
"Scenario" : str(self.scenario.id),
"self": self.__MODELLER_NAMESPACE__}
return atts
def _assignVDFToLinkSubSet(self, vdf, filterExpression):
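        # Comment for clarity: writes the NCS11 VDF id into the temporary attribute
        # for every link matching the Emme network-calculator filter expression.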
spec = {
"result": self.attr.id,
"expression": str(vdf),
"selections": {"link": filterExpression},
"type": "NETWORK_CALCULATION"
}
self.networkCalculator(spec, scenario=self.scenario)
def _copyAttributeToVDF(self):
spec = {
"result": "vdf",
"expression": self.attr.id,
"selections": {"link": "all"},
"type": "NETWORK_CALCULATION"
}
self.networkCalculator(spec, scenario=self.scenario)
@_m.method(return_type=unicode)
def tool_run_msg_status(self):
return self.tool_run_msg
|
gpl-3.0
| -17,125,685,268,828,372 | 41.139303 | 148 | 0.526036 | false | 4.089329 | false | false | false |
ioreshnikov/wells
|
wells/time_dependent.py
|
1
|
1615
|
import scipy as s
import scipy.fftpack as fft
import scipy.integrate
import sys
def integrate(t, x, input, potential, delta, pump, loss, absorber):
nt = len(t)
nx = len(x)
dx = x[1] - x[0]
k = 2*s.pi * fft.fftfreq(nx, dx)
d = - delta - 1/2 * k**2
spectrum = fft.fft(input)
spectrum_ = spectrum
def rhs(t, spectrum_):
exp_ = s.exp(1j * d * t)
spectrum = exp_ * spectrum_
state = fft.ifft(spectrum)
nonlinearity = abs(state)**2 * state
nonlinearity += - potential * state
nonlinearity += 1j * loss * state
nonlinearity += 1j * absorber * (abs(state) - abs(input)) * state
nonlinearity += pump
return 1j * 1/exp_ * fft.fft(nonlinearity)
solver = scipy.integrate.ode(rhs)
solver.set_integrator("zvode",
rtol=1E-6,
atol=1E-10,
nsteps=2048)
solver.set_initial_value(spectrum_, 0)
spectra_ = s.zeros((nt, nx), dtype=complex)
spectra_[0, :] = spectrum_
# Preallocate return matrices in advance.
spectra = s.zeros((nt, nx), dtype=complex)
states = s.zeros((nt, nx), dtype=complex)
for i in range(1, nt):
sys.stderr.write("\rIntegrating: %-3.3f%%" % (100 * i/nt))
spectra_[i, :] = solver.integrate(t[i])
sys.stderr.write("\r")
for i in range(0, nt):
spectra[i, :] = s.exp(1j * d * t[i]) * spectra_[i, :]
states[i, :] = fft.ifft(spectra[i, :])
spectra[i, :] = 1/nt * fft.fftshift(spectra[i, :])
k = fft.fftshift(k)
return k, states, spectra
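# Hedged usage sketch (not part of the original module): the grid, Gaussian
# input and parameter values below are assumptions chosen only to show the
# call signature of integrate().
#
#     t = s.linspace(0.0, 10.0, 256)
#     x = s.linspace(-20.0, 20.0, 512)
#     input = s.exp(-x**2)
#     potential = s.zeros_like(x)
#     absorber = s.zeros_like(x)
#     k, states, spectra = integrate(t, x, input, potential,
#                                    delta=0.0, pump=0.0, loss=0.0,
#                                    absorber=absorber)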
|
mit
| 2,178,837,503,368,915,500 | 28.907407 | 73 | 0.544892 | false | 3.117761 | false | false | false |
HenriqueLR/payments
|
app/main/decorators.py
|
1
|
1514
|
#encoding: utf-8
from django.core.exceptions import PermissionDenied
from django.contrib import messages
from django.shortcuts import redirect
from django.contrib.auth import get_user_model
from wallet.models import Debit, Deposit, Note
from main.utils import get_list_permissions
User = get_user_model()
def ajax_required(view):
def wrap(request, *args, **kwargs):
if not request.is_ajax():
messages.error(request, 'Impossivel acessar o link, entre em contato com administrador do sistema')
return redirect('accounts:logout')
return view(request, *args, **kwargs)
wrap.__doc__ = view.__doc__
wrap.__name__ = view.__name__
return wrap
def permissions_denied(view):
def wrap(request, *args, **kwargs):
list_model = [Debit, Deposit, Note]
for model in list_model:
permissions = get_list_permissions(model, permission_list=['all'])
if not request.user.has_perms(permissions):
messages.error(request, 'Não possui as permissões necessárias, contate o administrador do sistema')
return redirect('accounts:logout')
return view(request, *args, **kwargs)
wrap.__doc__ = view.__doc__
wrap.__name__ = view.__name__
return wrap
def verify_payment(view):
def wrap(request, *args, **kwargs):
if not request.user.account.status_payment:
messages.error(request, 'Estamos aguardando o pagamento para liberação do sistema')
return redirect('accounts:logout')
return view(request, *args, **kwargs)
wrap.__doc__ = view.__doc__
wrap.__name__ = view.__name__
return wrap
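# Hedged usage sketch (the view name is an assumption, not part of this
# module): the decorators above are meant to wrap Django view functions, e.g.
#
#     @ajax_required
#     @verify_payment
#     def wallet_summary(request):
#         ...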
|
mit
| 5,929,164,697,228,002,000 | 31.12766 | 103 | 0.721007 | false | 3.217484 | false | false | false |
vindeka/gateswift
|
gateswift/middleware.py
|
1
|
5131
|
# Copyright (c) 2013 Vindeka, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import kombu
from swift.common.utils import get_logger
from swift.common.swob import Request, HTTPOk, HTTPBadRequest, HTTPNotFound
class GateMiddleware(object):
"""
Gate middleware for swift communication.
Add to your pipeline in proxy-server.conf, such as::
[pipeline:main]
pipeline = catch_errors cache tempauth gatemiddleware proxy-server
    And add a gatemiddleware filter section, such as::
[filter:gatemiddleware]
use = egg:gateswift#gatemiddleware
amqp_connection = amqp://guest:guest@localhost/
:param app: The next WSGI app in the pipeline
:param conf: The dict of configuration values
"""
def __init__(self, app, conf):
self.app = app
self.conf = conf
self.logger = get_logger(conf, log_route='gatemiddleware')
self.conn_str = conf.get('amqp_connection', 'amqp://localhost/')
self.exc_str = conf.get('amqp_exchange', 'gate')
self.exc_type = conf.get('amqp_exchange_type', 'direct')
self.exc_durable = bool(conf.get('amqp_exchange_durable', 'True'))
def __call__(self, env, start_response):
self.logger.debug('Initialising gate middleware')
req = Request(env)
try:
version, account = req.split_path(1, 3, True)
except ValueError:
            return HTTPNotFound(request=req)
        if account == 'gate':
            # Handles direct calls to gate
            return HTTPOk(request=req)
if 'X-Gate-Verify' in env:
verify = env['X-Gate-Verify']
self.logger.debug('Verification request: %s algorithms: %s' % (req.path, verify))
try:
version, account, container, obj = req.split_path(4, 4, True)
except ValueError:
return HTTPBadRequest(request=req)
algorithms = verify.split(',')
for algo in algorithms:
metakey = 'X-Object-Meta-Gate-%s' % algo.upper()
if metakey not in env:
self.logger.debug('Invalid verification request, object missing: %s' % (metakey))
return HTTPBadRequest(request=req)
            if self.publish_verify(req.path, algorithms):
for algo in algorithms:
statuskey = 'X-Object-Meta-Gate-Verify-%s-Status' % algo.upper()
env[statuskey] = 'Queued'
env['X-Object-Meta-Gate-Verify'] = verify
if 'X-Gate-Process' in env:
module = env['X-Gate-Process']
self.logger.debug('Process request: %s module: %s' % (req.path, module))
try:
version, case, container, obj = req.split_path(4, 4, True)
except ValueError:
return HTTPBadRequest(request=req)
            if self.publish_process(req.path, module):
                env['X-Object-Meta-Gate-Process'] = module
                env['X-Object-Meta-Gate-Process-Status'] = 'Queued'
        # TODO: Get response to see if a fake object
        response = self.app(env, start_response)
        return response
def publish_verify(self, path, algorithms):
""" Publish a verify request on the queue to gate engine """
        exchange = kombu.Exchange(self.exc_str, self.exc_type, durable=self.exc_durable)
queue = kombu.Queue('verify', exchange=exchange, routing_key='verify')
with kombu.Connection(self.conn_str) as connection:
with connection.Producer(serializer='json') as producer:
producer.publish({'path':path, 'algorithms':algorithms},
exchange=exchange, routing_key='verify', declare=[queue])
return True
def publish_process(self, path, module):
""" Publish a process request on the queue to gate engine """
        exchange = kombu.Exchange(self.exc_str, self.exc_type, durable=self.exc_durable)
queue = kombu.Queue('process', exchange=exchange, routing_key='process')
with kombu.Connection(self.conn_str) as connection:
with connection.Producer(serializer='json') as producer:
producer.publish({'path':path, 'module':module},
exchange=exchange, routing_key='process', declare=[queue])
return True
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return GateMiddleware(app, conf)
return auth_filter
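# Hedged consumer sketch (assumes the default exchange/queue names used by the
# publishers above; not part of the original module). A gate engine worker
# could drain the 'verify' queue roughly like this:
#
#     exchange = kombu.Exchange('gate', 'direct', durable=True)
#     queue = kombu.Queue('verify', exchange=exchange, routing_key='verify')
#
#     def handle(body, message):
#         print(body['path'], body['algorithms'])
#         message.ack()
#
#     with kombu.Connection('amqp://guest:guest@localhost/') as connection:
#         with connection.Consumer(queue, callbacks=[handle]):
#             connection.drain_events(timeout=5)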
|
apache-2.0
| 8,751,585,938,849,223,000 | 38.469231 | 101 | 0.618008 | false | 4.094972 | false | false | false |
EdTsft/swilite
|
swilite/prolog.py
|
1
|
50967
|
"""An object-oriented interface to Prolog."""
from collections import namedtuple
from ctypes import (
POINTER,
byref,
c_char,
c_double,
c_int,
c_int64,
c_size_t,
c_void_p,
)
from swilite.core import (
BUF_DISCARDABLE,
CVT_WRITEQ,
PL_ATOM,
PL_BLOB,
PL_DICT,
PL_FLOAT,
PL_INTEGER,
PL_LIST_PAIR,
PL_NIL,
PL_Q_CATCH_EXCEPTION,
PL_Q_NODEBUG,
PL_STRING,
PL_TERM,
PL_VARIABLE,
PL_atom_chars,
PL_call,
PL_call_predicate,
PL_chars_to_term,
PL_close_foreign_frame,
PL_close_query,
PL_cons_functor,
PL_cons_functor_v,
PL_cons_list,
PL_context,
PL_copy_term_ref,
PL_discard_foreign_frame,
PL_erase,
PL_exception,
PL_functor_arity,
PL_functor_name,
PL_get_arg,
PL_get_atom,
PL_get_atom_nchars,
PL_get_bool,
PL_get_compound_name_arity,
PL_get_float,
PL_get_functor,
PL_get_head,
PL_get_int64,
PL_get_list,
PL_get_module,
PL_get_name_arity,
PL_get_nchars,
PL_get_nil,
PL_get_pointer,
PL_get_string_chars,
PL_get_tail,
PL_is_acyclic,
PL_is_atom,
PL_is_atomic,
PL_is_callable,
PL_is_compound,
PL_is_float,
PL_is_functor,
PL_is_ground,
PL_is_integer,
PL_is_list,
PL_is_number,
PL_is_pair,
PL_is_string,
PL_is_variable,
PL_module_name,
PL_new_atom,
PL_new_functor,
PL_new_module,
PL_new_term_ref,
PL_new_term_refs,
PL_next_solution,
PL_open_foreign_frame,
PL_open_query,
PL_pred,
PL_predicate,
PL_predicate_info,
PL_put_atom,
PL_put_atom_nchars,
PL_put_bool,
PL_put_float,
PL_put_functor,
PL_put_int64,
PL_put_list,
PL_put_list_nchars,
PL_put_nil,
PL_put_pointer,
PL_put_string_nchars,
PL_put_term,
PL_put_variable,
PL_record,
PL_recorded,
PL_register_atom,
PL_rewind_foreign_frame,
PL_term_type,
PL_unify,
PL_unify_arg,
PL_unify_atom,
PL_unify_atom_nchars,
PL_unify_bool,
PL_unify_compound,
PL_unify_float,
PL_unify_functor,
PL_unify_int64,
PL_unify_list,
PL_unify_list_nchars,
PL_unify_nil,
PL_unify_pointer,
PL_unify_string_nchars,
PL_unregister_atom,
REP_UTF8,
atom_t,
functor_t,
module_t,
state as prolog_state,
)
_term_type_code_name = {
PL_VARIABLE: 'variable',
PL_ATOM: 'atom',
PL_INTEGER: 'integer',
PL_FLOAT: 'float',
PL_STRING: 'string',
PL_TERM: 'compound',
PL_NIL: 'nil',
PL_BLOB: 'blob',
PL_LIST_PAIR: 'list-pair',
PL_DICT: 'dict',
}
__all__ = [
'Atom',
'Frame',
'Functor',
'Module',
'Predicate',
'PrologCallFailed',
'PrologException',
'PrologMemoryError',
'Query',
'Term',
'TermList',
'TermRecord',
]
class PrologException(Exception):
"""An exception raised within the Prolog system."""
def __init__(self, exception_term):
super().__init__()
self.exception_term = exception_term
def __str__(self):
return "Prolog Exception:\n{!s}".format(self.exception_term)
def __repr__(self):
return 'PrologException({!r})'.format(self.exception_term)
class PrologCallFailed(RuntimeError):
"""A call failed."""
def __init__(self, msg):
super().__init__()
self.msg = msg
def __str__(self):
return str(self.msg)
class PrologMemoryError(Exception):
"""Prolog stack is out of memory."""
pass
class HandleWrapper(object):
"""Class wrapping a handle."""
def __init__(self, handle):
self._handle = handle
@classmethod
def _from_handle(cls, handle):
"""Initialize from an existing handle."""
if handle is None:
# When the handle truly is 0, ctypes interprets the value as None.
# Undo the mistake here.
# Unfortunately, this means we can't warn about None being passed
# when it's an error.
handle = 0
if not isinstance(handle, int):
raise ValueError('Handle must be an int, not {}'.format(
type(handle).__name__))
new_obj = cls.__new__(cls)
HandleWrapper.__init__(new_obj, handle=handle)
return new_obj
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return self._handle == other._handle
def __ne__(self, other):
return not self == other
class TemporaryHandleMixIn(object):
"""Mixin for `HandleWrapper` where the handle can be invalidated."""
_valid = True
def __init__(self):
super().__init__()
def _get_handle(self):
if self._valid:
return self.__handle
        raise AttributeError('handle has been invalidated')
def _set_handle(self, handle):
self.__handle = handle
_handle = property(fget=_get_handle, fset=_set_handle)
def _invalidate(self):
"""Invalidate the handle."""
self._valid = False
class ConstantHandleToConstantMixIn(object):
"""`HandleWrapper` mixin where `_handle` is constant and refers to a
constant object.
"""
def __hash__(self):
return hash(self._handle)
def _decode_ptr_len_string(ptr, length, encoding='utf8'):
"""Decode a string from a ctypes pointer and length."""
return ptr[:length.value].decode(encoding)
class Atom(HandleWrapper):
"""Prolog Atom Interface"""
def __init__(self, name):
"""Create a named atom."""
super().__init__(handle=PL_new_atom(name.encode()))
@classmethod
def _from_handle(cls, handle):
"""Create an Atom object from an existing atom handle."""
new_atom = super()._from_handle(handle)
PL_register_atom(new_atom._handle)
return new_atom
def __str__(self):
return self.get_name()
def __repr__(self):
return 'Atom(name={name!r})'.format(name=self.get_name())
def __del__(self):
if prolog_state.is_available:
PL_unregister_atom(self._handle)
def __copy__(self):
"""A new `Atom` object pointing to the same atom."""
return self._from_handle(self._handle)
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
# Atoms can have different handles but the same name.
return self.get_name() == other.get_name()
def __hash__(self):
return hash(self.get_name())
def get_name(self):
"""The atom's name as a string."""
return PL_atom_chars(self._handle).decode()
class Functor(HandleWrapper, ConstantHandleToConstantMixIn):
"""Prolog Functor Interface"""
def __init__(self, name, arity):
"""Create a functor.
Args:
name (Atom): Name of the functor.
Either Atom object or string, the former is more efficient.
arity (int): Arity of the functor.
"""
try:
name_handle = name._handle
except AttributeError:
name_handle = Atom(name=name)._handle
super().__init__(handle=PL_new_functor(name_handle, arity))
def __str__(self):
return "{name}/{arity}".format(name=self.get_name(),
arity=self.get_arity())
def __repr__(self):
return "Functor(name={name!r}, arity={arity!r})".format(
name=self.get_name(), arity=self.get_arity())
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return (self.get_name() == other.get_name() and
self.get_arity() == other.get_arity())
def __hash__(self):
return hash((self.get_name(), self.get_arity()))
def __call__(self, *args):
"""Returns a new compound term created from this functor and `args`.
The length of `args` must be the same as the arity of `functor`.
See `Term.from_cons_functor`.
"""
return Term.from_cons_functor(self, *args)
def get_name(self):
"""The functor's name as an `Atom` object."""
return Atom._from_handle(PL_functor_name(self._handle))
def get_arity(self):
"""The functor's arity as an integer."""
return PL_functor_arity(self._handle)
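# Hedged illustration (the functor name and arguments are assumptions): calling
# a Functor builds a compound term via Term.from_cons_functor, e.g.
#
#     point = Functor('point', 2)
#     t = point(Term.from_integer(1), Term.from_integer(2))   # point(1, 2)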
class Module(HandleWrapper, ConstantHandleToConstantMixIn):
"""Prolog Module Interface"""
def __init__(self, name):
"""Finds existing module or creates a new module with given name.
Args:
name (Atom): Name of the module.
"""
super().__init__(handle=PL_new_module(name._handle))
def __str__(self):
return str(self.get_name())
def __repr__(self):
return 'Module(name={name!r})'.format(name=self.get_name())
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return self.get_name() == other.get_name()
def __hash__(self):
return hash(self.get_name())
@classmethod
def current_context(cls):
"""Returns the current context module."""
return cls._from_handle(PL_context())
def get_name(self):
"""The name of the module as an `Atom` object."""
return Atom._from_handle(PL_module_name(self._handle))
class Predicate(HandleWrapper, ConstantHandleToConstantMixIn):
"""Prolog Predicate Interface"""
def __init__(self, functor, module=None):
"""Create a predicate from a functor.
Args:
functor (Functor): Functor used to create the predicate.
module (Module) : Module containing the functor.
If ``None``, uses the current context module.
"""
super().__init__(
handle=PL_pred(functor._handle, _get_nullable_handle(module)))
@classmethod
def from_name_arity(cls, name, arity, module_name=None):
"""Create a predicate directly from Python's built-in types.
Args:
name (str) : Name of functor used to create the predicate.
arity (int) : Arity of functor used to create the predicate.
module_name (str): Name of module containing the functor.
If ``None``, uses the current context module.
"""
return cls._from_handle(handle=PL_predicate(
name.encode(), arity,
module_name.encode() if module_name is not None else None))
def __str__(self):
info = self.get_info()
return '{module_prefix}{name}/{arity}'.format(
module_prefix=(str(info.module) + ':'
if info.module is not None else ''),
name=info.name,
arity=info.arity)
def __repr__(self):
info = self.get_info()
return 'Predicate(functor={functor!r}, module={module!r})'.format(
functor=Functor(name=info.name, arity=info.arity),
module=info.module)
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return self.get_info() == other.get_info()
def __hash__(self):
return hash(self.get_info())
def __call__(self, *arguments, arglist=None, goal_context_module=None,
check=False):
"""Call predicate with arguments.
Finds a binding for arguments that satisfies the predicate.
Like Query but only finds the first solution.
Args:
*arguments (Term) : Terms to pass as arguments to this
predicate.
arglist (TermList) : Arguments to this predicate.
Cannot pass both arguments and arglist.
goal_context_module (Module): Context module of the goal.
If ``None``, the current context module is used, or ``user`` if
there is no context. This only matters for meta_predicates.
check (bool) : Check that the call succeeded.
Returns:
bool: True if a binding for `arguments` was found.
Raises:
PrologException : If an exception was raised in Prolog.
PrologCallFailed: If the call failed and `check` is ``True``.
"""
if arglist is None:
arglist = TermList.from_terms(*arguments)
elif arguments:
raise ValueError('Cannot provide both "arguments" and "arglist".')
self.check_argument_match(arglist)
success = bool(PL_call_predicate(
_get_nullable_handle(goal_context_module),
PL_Q_NODEBUG | PL_Q_CATCH_EXCEPTION,
self._handle,
arglist._handle))
if check and not success:
raise PrologCallFailed(str(self))
return success
Info = namedtuple('Info', ['name', 'arity', 'module'])
def get_info(self):
"""Returns name, arity, and module of this predicate.
Returns:
Predicate.Info:
"""
name = atom_t()
arity = c_int()
module = module_t()
PL_predicate_info(self._handle,
byref(name), byref(arity), byref(module))
return self.Info(name=Atom._from_handle(name.value),
arity=arity.value,
module=Module._from_handle(module.value))
def check_argument_match(self, arguments):
"""Check that the right number of arguments are given.
Args:
arguments (TermList): List of arguments.
Raises:
ValueError : If the number of arguments does not match
the predicate's arity.
"""
number_of_arguments = len(arguments)
arity = self.get_info().arity
if number_of_arguments != arity:
raise ValueError(
('number of arguments ({nargs}) does not match '
'predicate arity ({arity})').format(
nargs=number_of_arguments,
arity=arity))
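# Hedged example (assumes the standard '=' / unification predicate; shown for
# illustration only):
#
#     eq = Predicate.from_name_arity('=', 2)
#     X = Term()
#     eq(X, Term.from_integer(1), check=True)   # binds X to 1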
class Term(HandleWrapper):
"""Prolog Term Interface."""
_equality_predicate = Predicate.from_name_arity(name='==', arity=2)
_logical_or_functor = Functor(';', 2)
_logical_and_functor = Functor(',', 2)
def __init__(self):
"""Initialize a new term. The term is initially a variable."""
super().__init__(handle=PL_new_term_ref())
def __str__(self):
"""A Prolog string representing this term."""
return self.get_chars()
def __repr__(self):
return ('Term(handle={handle!r}, type={type!r}, value={value!r})'
.format(handle=self._handle, type=self.type(),
value=self.get_chars()))
def __eq__(self, other):
"""Check if two terms have the same value. Does not perform unification.
"""
try:
return self._equality_predicate(self, other)
except AttributeError as e:
if '_handle' not in str(e):
raise
return NotImplemented
def __or__(self, other):
"""Logical OR of two terms."""
return self._logical_or_functor(self, other)
def __and__(self, other):
"""Logical AND of two terms."""
return self._logical_and_functor(self, other)
def __int__(self):
"""Integer representation of this term (if it stores an integer)."""
return self.get_integer()
def __float__(self):
"""Float representation of this term (if it stores a float)."""
return self.get_float()
def __deepcopy__(self, memo):
"""Creates a new Prolog term, copied from the old."""
return self.from_term(self)
def type(self):
"""Term type as a string.
Returns one of the following strings:
* ``variable``
* ``atom``
* ``integer``
* ``float``
* ``string``
* ``term``
* ``nil``
* ``blob``
* ``list-pair``
* ``dict``
"""
type_code = PL_term_type(self._handle)
return _term_type_code_name[type_code]
def is_acyclic(self):
"""True if this is an acyclic term."""
return bool(PL_is_acyclic(self._handle))
def is_atom(self):
"""True if this term is an atom."""
return bool(PL_is_atom(self._handle))
def is_atomic(self):
"""True if this term is atomic.
A term is atomic if it is not variable or compound.
"""
return bool(PL_is_atomic(self._handle))
def is_callable(self):
"""True if this term is callable.
A term is callable if it is compound or an atom.
"""
return bool(PL_is_callable(self._handle))
def is_compound(self):
"""True if this term is compound.
A compound term is a functor with arguments.
"""
return bool(PL_is_compound(self._handle))
def is_float(self):
"""True if this term is a float."""
return bool(PL_is_float(self._handle))
def is_functor(self, functor):
"""True if this term is compound and its functor is `functor`.
Args:
functor (Functor): Check if this is the functor of `self`.
"""
return bool(PL_is_functor(self._handle, functor._handle))
def is_ground(self):
"""True if this term is a ground term.
A ground term is a term that holds no free variables.
"""
return bool(PL_is_ground(self._handle))
def is_integer(self):
"""True if this term is an integer."""
return bool(PL_is_integer(self._handle))
def is_list(self):
"""True if this term is a list.
A term is a list if it is:
* a compound term using the list constructor (`is_pair`); or
* the list terminator (`is_nil`).
Note:
This definition is weaker than what is used by the prolog predicate
``is_list``, which has the additional constraint that the 2nd term
in the list pair also be a list.
For example,
>>> Term.from_parsed('[1|2]').is_list()
True
>>> Term.from_parsed('is_list([1|2])')()
False
"""
return bool(PL_is_list(self._handle))
def is_nil(self):
"""True if this term is the list terminator.
The list terminator is the constant ``[]``.
"""
return bool(PL_get_nil(self._handle))
def is_number(self):
"""True if this term is an integer or float."""
return bool(PL_is_number(self._handle))
def is_pair(self):
"""True if this term is a compound term using the list constructor."""
return bool(PL_is_pair(self._handle))
def is_string(self):
"""True if this term is a string."""
return bool(PL_is_string(self._handle))
def is_variable(self):
"""True if this term is a variable."""
return bool(PL_is_variable(self._handle))
@staticmethod
def _require_success(return_code):
assert bool(return_code)
@staticmethod
def _require_success_expecting_type(return_code, *required_types):
assert required_types
if not bool(return_code):
if len(required_types) == 1:
type_str = required_types[0]
elif len(required_types) == 2:
type_str = '{} or {}'.format(*required_types)
else:
type_str = '{}, or {}'.format(
', '.join(required_types[:-1],),
required_types[-1])
raise TypeError('Term is not {a} {type}.'.format(
a=('an' if type_str[0].lower() in 'aeiou' else 'a'),
type=type_str))
def get_atom(self):
"""An `Atom` object representing this term, if it is a prolog atom."""
a = atom_t()
self._require_success_expecting_type(
PL_get_atom(self._handle, byref(a)),
'atom')
return Atom._from_handle(a.value)
def get_atom_name(self):
"""The value of this term as a string, if it is a prolog atom."""
s = POINTER(c_char)()
length = c_size_t()
self._require_success_expecting_type(
PL_get_atom_nchars(self._handle, byref(length), byref(s)),
'atom')
return _decode_ptr_len_string(s, length)
def get_string_chars(self):
"""The value of this term as a string, if it is a prolog string."""
s = POINTER(c_char)()
length = c_size_t()
self._require_success_expecting_type(
PL_get_string_chars(self._handle, byref(s), byref(length)),
'string')
return _decode_ptr_len_string(s, length)
def get_chars(self):
"""Representation of this term as a string in Prolog syntax."""
s = POINTER(c_char)()
length = c_size_t()
self._require_success(
PL_get_nchars(self._handle,
byref(length),
byref(s),
CVT_WRITEQ | BUF_DISCARDABLE | REP_UTF8))
return _decode_ptr_len_string(s, length, encoding='utf8')
def get_integer(self):
"""The value of this term as an integer, if it is an integer or
compatible float.
"""
i = c_int64()
self._require_success_expecting_type(
PL_get_int64(self._handle, byref(i)),
'integer', 'int-compatible float')
return i.value
def get_bool(self):
"""The value of this term as a boolean, if it is `true` or `false`."""
i = c_int()
self._require_success_expecting_type(
PL_get_bool(self._handle, byref(i)),
'boolean')
return bool(i.value)
def get_pointer(self):
"""The value of this term as an integer address, if it is a pointer."""
p = c_void_p()
self._require_success_expecting_type(
PL_get_pointer(self._handle, byref(p)),
'pointer')
return p.value
def get_float(self):
"""The value of this term as a float, if it is an integer or float."""
f = c_double()
self._require_success_expecting_type(
PL_get_float(self._handle, byref(f)),
'float', 'integer')
return f.value
def get_functor(self):
"""A `Functor` object representing this term, if it is a compound term
or atom."""
functor = functor_t()
self._require_success_expecting_type(
PL_get_functor(self._handle, byref(functor)),
'compound term', 'atom')
return Functor._from_handle(functor.value)
NameArity = namedtuple('NameArity', ['name', 'arity'])
def get_name_arity(self):
"""The name and arity of this term, if it is a compound term or an atom.
Compound terms with arity 0 give the same result as an atom.
To distinguish them use `is_compound` and/or `get_compound_name_arity`.
Returns:
NameArity: namedtuple (name, arity)
"""
name = atom_t()
arity = c_int()
self._require_success_expecting_type(
PL_get_name_arity(self._handle, byref(name), byref(arity)),
'compound term', 'atom')
return self.NameArity(name=Atom._from_handle(name.value),
arity=arity.value)
def get_compound_name_arity(self):
"""The name and arity of this term, if it is a compound term.
The same as `get_name_arity` but fails for atoms.
Returns:
NameArity: Named tuple of name (`string`) and arity (`int`).
"""
name = atom_t()
arity = c_int()
self._require_success_expecting_type(
PL_get_compound_name_arity(self._handle, byref(name),
byref(arity)),
'compound term')
return self.NameArity(name=Atom._from_handle(name.value),
arity=arity.value)
def get_module(self):
"""A `Module` object corresponding to this term, if it is an atom."""
module = module_t()
self._require_success_expecting_type(
PL_get_module(self._handle, byref(module)),
'atom')
return Module._from_handle(module.value)
def get_arg(self, index):
"""A new term with a reference to an argument of this term.
Args:
index (int): Index of the argument.
Index is 0-based, unlike in Prolog.
Returns:
Term: A new term reference to the argument.
Raises:
AssertionError: If `index` is out of bounds or
if this term is not compound.
        Note: This returns a _new_ term, not the argument term itself.
Therefore, using `put_*` methods on the return value will not
change the argument itself, while unification will.
"""
t = Term()
self._require_success(
PL_get_arg(index + 1, self._handle, t._handle))
return t
HeadTail = namedtuple('HeadTail', ['head', 'tail'])
def get_list_head_tail(self):
"""Get the head and tail of the list represented by this term.
Returns:
HeadTail: Named tuple of head and tail, both `Term` objects.
"""
head = Term()
tail = Term()
self._require_success_expecting_type(
PL_get_list(self._handle, head._handle, tail._handle),
'list')
return self.HeadTail(head=head, tail=tail)
def get_list_head(self):
"""The head of the list represented by this term.
Returns:
Term:
"""
head = Term()
self._require_success_expecting_type(
PL_get_head(self._handle, head._handle),
'list')
return head
def get_list_tail(self):
"""The tail of the list represented by this term.
Returns:
Term:
"""
tail = Term()
self._require_success_expecting_type(
PL_get_tail(self._handle, tail._handle),
'list')
return tail
def get_nil(self):
"""Succeeds if this term represents the list termination constant (nil).
Raises:
AssertionError: If this term does not represent nil.
"""
self._require_success(
PL_get_nil(self._handle))
def put_variable(self):
"""Put a fresh variable in this term, resetting it to its initial state.
"""
PL_put_variable(self._handle)
def put_atom(self, atom):
"""Put an atom in this term.
Args:
atom (Atom): Atom to put in this term.
"""
PL_put_atom(self._handle, atom._handle)
def put_bool(self, val):
"""Put a boolean in this term.
Puts either the atom ``true`` or the atom ``false``.
"""
PL_put_bool(self._handle, int(bool(val)))
def put_atom_name(self, atom_name):
"""Put an atom in this term, constructed from a string name.
Args:
atom_name (str): Name of the atom to put in this term.
"""
encoded_atom_name = atom_name.encode()
PL_put_atom_nchars(self._handle,
len(encoded_atom_name),
encoded_atom_name)
def put_string(self, string):
"""Put a string in the term."""
encoded_string = string.encode()
self._require_success(
PL_put_string_nchars(self._handle,
len(encoded_string),
encoded_string))
def put_list_chars(self, bytes_):
"""Put a byte string in the term as a list of characters."""
self._require_success(
PL_put_list_nchars(self._handle, len(bytes_), bytes_))
def put_integer(self, val):
"""Put an integer in the term."""
self._require_success(
PL_put_int64(self._handle, val))
def put_pointer(self, address):
"""Put an integer address in the term."""
self._require_success(
PL_put_pointer(self._handle, address))
def put_float(self, val):
"""Put a floating-point value in the term."""
self._require_success(
PL_put_float(self._handle, val))
def put_functor(self, functor):
"""Put a compound term created from a functor in this term.
The arguments of the compound term are __TEMPORARY__ variables.
To create a term with instantiated arguments or with persistent
variables, use `put_cons_functor`.
Warning:
The arguments of the returned compound term are not persistent.
References to the arguments (e.g. using `get_arg`) may be
invalidated by the prolog system after other API calls.
Either use `put_cons_functor` or get a new reference to the
arguments each time they are needed.
"""
self._require_success(
PL_put_functor(self._handle, functor._handle))
def put_list(self):
"""Put a list pair in this term, whose head and tail are variables.
Like `put_functor` but using the ``[|]`` functor.
"""
self._require_success(
PL_put_list(self._handle))
def put_nil(self):
"""Put the list terminator constant in this term."""
self._require_success(
PL_put_nil(self._handle))
def put_term(self, term):
"""Set this term to reference the new term."""
PL_put_term(self._handle, term._handle)
@classmethod
def from_term(cls, term):
"""Create a new term as a copy of an existing one."""
return cls._from_handle(handle=PL_copy_term_ref(term._handle))
def put_parsed(self, string):
"""Parse `string` as Prolog and place the result in this term.
Args:
string (str): A term string in Prolog syntax.
Optionally ends with a full-stop (.)
Raises:
PrologException: If the parse fails.
The exception is also stored in this term.
"""
success = PL_chars_to_term(string.encode(), self._handle)
if not success:
raise PrologException(self)
def put_cons_functor(self, functor, *args):
"""Set this term to a compound term created from `functor` and `args`.
The length of `args` must be the same as the arity of `functor`.
"""
functor_arity = functor.get_arity()
if functor_arity != len(args):
raise TypeError(
('Functor arity ({arity}) does not match '
'number of arguments ({nargs}).').format(
arity=functor_arity, nargs=len(args)))
if not all(isinstance(arg, Term) for arg in args):
raise TypeError(
'All arguments after `functor` must be `Term` objects.')
if len(args) > 4:
# PL_cons_functor segfaults when passed > 4 arguments
return self.put_cons_functor_v(functor, TermList.from_terms(*args))
self._require_success(
PL_cons_functor(self._handle, functor._handle,
*[arg._handle for arg in args]))
def put_cons_functor_v(self, functor, args):
"""Set this term to a compound term created from `functor` and args.
Args:
functor (Functor): Functor used to create the compound term.
args (TermList) : A term list of arguments.
"""
self._require_success(
PL_cons_functor_v(self._handle,
functor._handle,
args._handle))
def put_cons_list(self, head, tail):
"""Set this term to a list constructed from head and tail."""
self._require_success(
PL_cons_list(self._handle, head._handle, tail._handle))
def put_list_terms(self, terms):
"""Set this term to a list constructed from a list of terms.
Args:
terms (list): A (python) list of terms.
"""
try:
head = terms.pop(0)
except IndexError:
self.put_nil()
return
tail = Term.from_nil()
while terms:
tail = Term.from_cons_list(terms.pop(), tail)
self.put_cons_list(head, tail)
def __call__(self, context_module=None, check=False):
"""Call term like once(term).
Attempts to find an assignment of the variables in the term that
makes the term true.
Args:
context_module (Module): Context module of the goal.
check (bool) : Check that the call succeeded.
Returns:
bool: True if the call succeeded.
Raises:
PrologCallFailed: If the call failed and `check` is ``True``.
"""
success = bool(PL_call(self._handle,
_get_nullable_handle(context_module)))
if check and not success:
raise PrologCallFailed(str(self))
return success
def unify(self, term):
"""Unify with a term.
Functionally equivalent to:
`Predicate.from_name_arity('=', 2)(self, term)`
Returns:
bool: True if the unification was successful
Even if this returns false, the unification may have partially
completed and variables will remain bound. Use with `Frame` to
completely undo bindings in the event of failure.
"""
return bool(PL_unify(self._handle, term._handle))
def unify_atom(self, atom):
"""Unify with an atom.
Returns:
bool: True on success.
"""
return bool(PL_unify_atom(self._handle, atom._handle))
def unify_bool(self, val):
"""Unify with a boolean.
Returns:
bool: True on success.
"""
return bool(PL_unify_bool(self._handle, int(bool(val))))
def unify_atom_name(self, atom_name):
"""Unify with an atom given by its name.
Returns:
bool: True on success.
"""
encoded_atom_name = atom_name.encode()
return bool(PL_unify_atom_nchars(self._handle,
len(encoded_atom_name),
encoded_atom_name))
def unify_list_chars(self, bytes_):
"""Unify with a list of bytes.
Returns:
bool: True on success.
"""
return bool(PL_unify_list_nchars(self._handle, len(bytes_), bytes_))
def unify_string(self, string):
"""Unify with a string.
Returns:
bool: True on success.
"""
encoded_string = string.encode()
return bool(PL_unify_string_nchars(self._handle,
len(encoded_string),
encoded_string))
def unify_integer(self, val):
"""Unify with an integer.
Returns:
bool: True on success.
"""
return bool(PL_unify_int64(self._handle, val))
def unify_float(self, val):
"""Unify with a floating-point value.
Returns:
bool: True on success.
"""
return bool(PL_unify_float(self._handle, val))
def unify_pointer(self, address):
"""Unify with an integer address.
Returns:
bool: True on success.
"""
return bool(PL_unify_pointer(self._handle, address))
def unify_functor(self, functor):
"""Unify with a functor.
Unifies the functor, not any arguments.
If functor has arity 0, unifies with an atom.
Identical to `Term.unify_compound` except for arity-0 functors.
Returns:
bool: True on success.
"""
        return bool(PL_unify_functor(self._handle, functor._handle))
def unify_compound(self, functor):
"""Unify with a compound functor.
Unifies the functor, not any arguments.
If functor has arity 0, unifies with an arity-0 compound term.
        Identical to `Term.unify_functor` except for arity-0 functors.
Returns:
bool: True on success.
"""
        return bool(PL_unify_compound(self._handle, functor._handle))
def unify_list(self, head, tail):
"""Unify with a list cell [head | tail] for terms head, tail.
Returns:
bool: True on success.
"""
return bool(PL_unify_list(self._handle, head._handle, tail._handle))
def unify_nil(self):
"""Unify with the list terminator constant.
Returns:
bool: True on success.
"""
return bool(PL_unify_nil(self._handle))
def unify_arg(self, index, arg):
"""Unify the index-th argument of a compound term with term `arg`.
Indexing is 0-based.
Returns:
bool: True on success.
"""
return bool(PL_unify_arg(index + 1, self._handle, arg._handle))
def _add_from_method_to_class(klass, put_method_name, put_method):
suffix = put_method_name[4:]
from_method_name = 'from_' + suffix
if hasattr(klass, from_method_name):
raise AttributeError('{} method already exists.'.format(
from_method_name))
def from_method(cls, *args, **kwargs):
new_term = cls()
put_method(new_term, *args, **kwargs)
return new_term
from_method.__name__ = from_method_name
from_method.__qualname__ = str(klass.__name__) + '.' + from_method_name
from_method.__doc__ = 'A new Term initialized using `{}`'.format(
put_method_name)
setattr(klass, from_method_name, classmethod(from_method))
# Generate a from_<type> method for each put_<type> method.
for put_method_name in dir(Term):
if not put_method_name.startswith('put_'):
continue
put_method = getattr(Term, put_method_name)
if not callable(put_method):
continue
try:
_add_from_method_to_class(Term, put_method_name, put_method)
except AttributeError as e:
if 'already exists' in str(e):
# Don't add if from_ already exists.
pass
else:
raise
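# Hedged illustration (values are assumptions): the generated class methods
# mirror the put_* methods, e.g.
#
#     t = Term.from_integer(7)         # Term() followed by put_integer(7)
#     a = Term.from_atom_name('foo')   # Term() followed by put_atom_name('foo')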
class TemporaryTerm(Term, TemporaryHandleMixIn):
pass
class TermList(HandleWrapper):
"""A collection of term references.
Required by `Term.cons_functor_v` and `Query`.
"""
def __init__(self, length):
self._length = length
super().__init__(handle=PL_new_term_refs(length))
@classmethod
def from_terms(cls, *terms):
termlist = cls(len(terms))
for i, term in enumerate(terms):
termlist[i].put_term(term)
return termlist
def __eq__(self, other):
try:
return super().__eq__(other) and self._length == other._length
except AttributeError as e:
if '_handle' not in str(e):
raise
return NotImplemented
def __str__(self):
return str(list(self))
def __repr__(self):
return 'TermList(handle={handle!r}, length={length!r})'.format(
handle=self._handle,
length=self._length)
def __len__(self):
return self._length
def __getitem__(self, key):
if isinstance(key, int) and key >= 0 and key < self._length:
return Term._from_handle(self._handle + key)
else:
raise IndexError()
class Query():
"""Prolog Query Context Manager."""
_call_predicate = Predicate.from_name_arity('call', 1)
def __init__(self, predicate, *arguments, arglist=None,
goal_context_module=None):
"""Prepare a query.
A query consists of a predicate (`predicate`) and a list of arguments
(`arguments`). Each solution is an assignment to variables in
`arguments` that satisfies the predicate.
A query behaves statefully. The solutions must be read from
`arguments`.
Args:
predicate (Predicate) : Predicate to query.
*arguments (Term) : Terms to pass as arguments to
`predicate`.
arglist (TermList) : List of argument terms to
`predicate`. Cannot pass both arguments and arglist.
goal_context_module (Module): Context module of the goal.
If ``None``, the current context module is used, or ``user`` if
there is no context. This only matters for meta_predicates.
Note
----
Only one query can be active at a time, but the query is not activated
until `__enter__` is called.
"""
if arglist is None:
arglist = TermList.from_terms(*arguments)
elif arguments:
raise ValueError('Cannot provide both "arguments" and "arglist".')
predicate.check_argument_match(arglist)
self.predicate = predicate
self.arglist = arglist
self.goal_context_module = goal_context_module
self.active_query = None
@classmethod
def call_term(cls, term, goal_context_module=None):
"""Prepare a query that will call a single term.
Args:
term (Term) : Term to call.
goal_context_module (Module): Context module of the goal.
If ``None``, the current context module is used, or ``user`` if
there is no context. This only matters for meta_predicates.
See `Query.__init__` for more. Equivalent to:
``Query(Predicate.from_name_arity('call', 1), term)``
"""
return cls(Query._call_predicate, term,
goal_context_module=goal_context_module)
def __str__(self):
return '{pred}({args})'.format(
pred=str(self.predicate).rsplit('/', 1)[0],
args=', '.join(str(arg) for arg in self.arglist))
def __repr__(self):
return ('Query(predicate={predicate!r}, arglist={arglist!r}, '
'goal_context_module={goal_context_module!r})').format(
predicate=self.predicate,
arglist=self.arglist,
goal_context_module=self.goal_context_module)
def __enter__(self):
self.active_query = _ActiveQuery(self)
return self.active_query
def __exit__(self, type, value, traceback):
self.active_query.close()
def term_assignments(self, term, persistent):
"""The value of a term under each solution to the query.
Iterates over all remaining solutions to the query and, for each
solution, yields the current value of `term`.
Args:
term (Term): The term whose assignments to return.
persistent (bool): If True, `TermRecord` objects will be yielded
instead of `TemporaryTerm` so that their value persists
across solutions.
Yields:
Either `TemporaryTerm` or a `TermRecord` representing the
value of `term` under a particular solution.
If `persistent` is ``False``, then `TemporaryTerm` values are
yielded, which are invalidated on the next call to `next_solution`.
"""
if persistent:
yield from self._term_assignments_persistent(term)
else:
yield from self._term_assignments_temporary(term)
def _term_assignments_persistent(self, term):
with self as active_query:
while active_query.next_solution():
yield TermRecord(term)
def _term_assignments_temporary(self, term):
with self as active_query:
while active_query.next_solution():
temporary_term = TemporaryTerm.from_term(term)
active_query.bind_temporary_term(temporary_term)
yield temporary_term
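# Hedged usage sketch (member/2 and the bindings are assumptions, not part of
# this module): a query is typically consumed through term_assignments() or
# the context manager, e.g.
#
#     member = Predicate.from_name_arity('member', 2)
#     X = Term()
#     items = Term.from_parsed('[a, b, c]')
#     for record in Query(member, X, items).term_assignments(X, persistent=True):
#         print(record.get())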
class _ActiveQuery(HandleWrapper, TemporaryHandleMixIn):
"""Interface to an active Prolog Query.
Only one query can be active at a time.
"""
def __init__(self, query):
"""Create an active query. See `Query`
Args:
query (Query) : Query to activate.
"""
self._query = query
super().__init__(handle=PL_open_query(
_get_nullable_handle(query.goal_context_module),
PL_Q_NODEBUG | PL_Q_CATCH_EXCEPTION,
query.predicate._handle,
query.arglist._handle))
self._bound_temporary_terms = []
def next_solution(self):
"""Find the next solution, updating `arglist`.
Returns:
bool: ``True`` if a solution was found, otherwise returns
``False``.
Raises:
PrologException: If an exception was raised in Prolog.
Warning
-------
Calling `next_solution` results in backtracking.
All variable bindings and newly-created terms since the last call
will be undone.
Use `TermRecord` to persist terms across backtracks.
"""
success = bool(PL_next_solution(self._handle))
self._invalidate_bound_temporary_terms()
if not success:
exception_term = PL_exception(self._handle)
if exception_term:
raise PrologException(Term._from_handle(exception_term))
return success
def bind_temporary_term(self, term):
"""Bind a temporary term to the current solution state of this query.
The term will be invalidated on the next call to `next_solution`.
Args:
term (TemporaryTerm): Temporary term to bind.
"""
self._bound_temporary_terms.append(term)
def _invalidate_bound_temporary_terms(self):
for term in self._bound_temporary_terms:
term._invalidate()
self._bound_temporary_terms = []
def close(self):
"""Close the query and destroy all data and bindings associated with it.
"""
PL_close_query(self._handle)
self._invalidate()
def __str__(self):
return str(self._query)
def __repr__(self):
return ('_ActiveQuery(query={query!r}, _handle={handle!r})'.format(
query=self._query, _handle=self._handle))
def _get_nullable_handle(handle_wrapper):
"""Return the handle of `handle_wrapper` or None"""
if handle_wrapper is None:
return None
else:
return handle_wrapper._handle
class TermRecord(HandleWrapper):
"""Records a Prolog Term so that it can be retrieved later.
This persists across backtracks, unlike `Term` itself.
"""
def __init__(self, term):
"""Create a term record.
Args:
term (Term): Term to record.
"""
super().__init__(PL_record(term._handle))
def get(self):
"""Get the term that was stored in this object.
Returns:
Term: A copy of the stored term.
"""
t = Term()
success = PL_recorded(self._handle, t._handle)
if not success:
raise PrologMemoryError()
return t
def __del__(self):
PL_erase(self._handle)
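# Hedged sketch (variable names are assumptions): a TermRecord lets a term
# value survive backtracking, e.g.
#
#     record = TermRecord(X)   # X is some bound Term
#     ...                      # later, possibly after backtracking
#     value = record.get()     # a fresh copy of the recorded term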
class Frame(HandleWrapper, TemporaryHandleMixIn):
"""A prolog frame context.
All term references (and optionally, data modifications) created within the
frame are discarded at the close of the frame.
    With close(), used to create temporary term references.
With discard(), used to undo unifications and other data modifications.
It is best to use the frame in a python context. i.e.:
>>> X = Term()
>>> eq = Predicate.from_name_arity('=', 2)
>>> with Frame() as frame:
... for i in range(3):
... t = frame.term()
... t.put_integer(i)
... eq(X, t) and None
... print(X)
... frame.rewind()
0
1
2
Warning:
Term objects created using the `Term` class constructors after the
frame is opened will produce undefined behaviour (likely segfault) if
used after the frame is closed, discarded, or rewound. Instead, use
the `term()` method to get `Term` objects with proper error handling.
Warning:
While the SWI-Prolog documentation doesn't specifically warn against
it, it is probably a bad idea to open and close multiple frames in
anything other than stack order.
Note:
Frames have no effect on the prolog dynamic database (assertz).
"""
def __init__(self, discard=False):
"""Open the frame.
Args:
discard (bool): If true, __exit__ calls discard() instead of
close().
"""
super().__init__(handle=PL_open_foreign_frame())
self.discard_on_exit = discard
self._associated_terms = []
def close(self):
"""Close the frame.
Discard all term references created since the frame was opened,
retaining all other prolog data.
"""
self._invalidate_associated_terms()
PL_close_foreign_frame(self._handle)
self._invalidate()
def discard(self):
"""Discard the frame.
Discard all term references, bindings, and prolog data created since
the frame was opened.
"""
self._invalidate_associated_terms()
PL_discard_foreign_frame(self._handle)
self._invalidate()
def rewind(self):
"""Rewind the frame.
Undo all bindings and discard all term references created since the
frame was opened. Does not pop the frame.
"""
self._invalidate_associated_terms()
PL_rewind_foreign_frame(self._handle)
def term(self):
"""Safely create Term objects within this frame.
The returned terms will be invalidated _in Python_ when this frame is
closed, discarded, or rewound.
        Term objects created within the frame using the `Term` class will be
invalidated in Prolog at the end of the frame, but _not_ in Python.
As a result, continuing to use those objects will produce undefined
behaviour, likely a segfault.
Conversely, the `TemporaryTerm` objects returned by this method will
        produce a catchable Python exception if used after invalidation,
rather than immediately terminating the program with a segfault.
"""
term = TemporaryTerm()
self._associated_terms.append(term)
return term
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
if self.discard_on_exit:
self.discard()
else:
self.close()
def _invalidate_associated_terms(self):
for term in self._associated_terms:
term._invalidate()
self._associated_terms = []
|
mit
| -278,160,444,804,315,000 | 30.287293 | 80 | 0.567858 | false | 4.042754 | false | false | false |
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/util/plugin_config.py
|
1
|
2386
|
from xml.etree import ElementTree
try:
from galaxy import eggs
eggs.require('PyYAML')
except Exception:
# If not in Galaxy, ignore this.
pass
try:
import yaml
except ImportError:
yaml = None
from galaxy.util.submodules import submodules
def plugins_dict(module, plugin_type_identifier):
""" Walk through all classes in submodules of module and find ones labelled
with specified plugin_type_identifier and throw in a dictionary to allow
constructions from plugins by these types later on.
"""
plugin_dict = {}
for plugin_module in submodules( module ):
        # FIXME: this is not how one is supposed to use __all__; why did you do
        # this, past John?
for clazz in plugin_module.__all__:
plugin_type = getattr( clazz, plugin_type_identifier, None )
if plugin_type:
plugin_dict[ plugin_type ] = clazz
return plugin_dict
def load_plugins(plugins_dict, plugin_source, extra_kwds={}):
source_type, source = plugin_source
if source_type == "xml":
return __load_plugins_from_element(plugins_dict, source, extra_kwds)
else:
return __load_plugins_from_dicts(plugins_dict, source, extra_kwds)
def __load_plugins_from_element(plugins_dict, plugins_element, extra_kwds):
plugins = []
for plugin_element in plugins_element.getchildren():
plugin_type = plugin_element.tag
plugin_kwds = dict( plugin_element.items() )
plugin_kwds.update( extra_kwds )
plugin = plugins_dict[ plugin_type ]( **plugin_kwds )
plugins.append( plugin )
return plugins
def __load_plugins_from_dicts(plugins_dict, configs, extra_kwds):
plugins = []
for config in configs:
plugin_type = config[ "type" ]
plugin_kwds = config
plugin_kwds.update( extra_kwds )
plugin = plugins_dict[ plugin_type ]( **plugin_kwds )
plugins.append( plugin )
return plugins
def plugin_source_from_path(path):
if path.endswith(".yaml") or path.endswith(".yml"):
return ('dict', __read_yaml(path))
else:
return ('xml', ElementTree.parse( path ).getroot())
def __read_yaml(path):
if yaml is None:
raise ImportError("Attempting to read YAML configuration file - but PyYAML dependency unavailable.")
with open(path, "rb") as f:
return yaml.load(f)
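# Hedged usage sketch (the package and file names are assumptions): plugins are
# typically loaded by pairing plugins_dict() with plugin_source_from_path():
#
#     import my_plugins                     # hypothetical package of plugin modules
#     registry = plugins_dict(my_plugins, 'plugin_type')
#     source = plugin_source_from_path('plugins.yml')
#     plugins = load_plugins(registry, source)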
|
gpl-3.0
| -1,747,664,163,855,154,200 | 28.45679 | 108 | 0.650042 | false | 3.943802 | true | false | false |
jakevdp/scipy
|
scipy/special/orthogonal.py
|
1
|
58856
|
"""
A collection of functions to find the weights and abscissas for
Gaussian Quadrature.
These calculations are done by finding the eigenvalues of a
tridiagonal matrix whose entries are dependent on the coefficients
in the recursion formula for the orthogonal polynomials with the
corresponding weighting function over the interval.
Many recursion relations for orthogonal polynomials are given:
.. math::
a1n f_{n+1} (x) = (a2n + a3n x ) f_n (x) - a4n f_{n-1} (x)
The recursion relation of interest is
.. math::
P_{n+1} (x) = (x - A_n) P_n (x) - B_n P_{n-1} (x)
where :math:`P` has a different normalization than :math:`f`.
The coefficients can be found as:
.. math::
A_n = -a2n / a3n
\\qquad
    B_n = ( a4n / a3n \\sqrt{h_{n-1} / h_n})^2
where
.. math::
h_n = \\int_a^b w(x) f_n(x)^2
assume:
.. math::
P_0 (x) = 1
\\qquad
P_{-1} (x) == 0
For the mathematical background, see [golub.welsch-1969-mathcomp]_ and
[abramowitz.stegun-1965]_.
References
----------
.. [golub.welsch-1969-mathcomp]
Golub, Gene H, and John H Welsch. 1969. Calculation of Gauss
Quadrature Rules. *Mathematics of Computation* 23, 221-230+s1--s10.
.. [abramowitz.stegun-1965]
Abramowitz, Milton, and Irene A Stegun. (1965) *Handbook of
Mathematical Functions: with Formulas, Graphs, and Mathematical
Tables*. Gaithersburg, MD: National Bureau of Standards.
http://www.math.sfu.ca/~cbm/aands/
.. [townsend.trogdon.olver-2014]
Townsend, A. and Trogdon, T. and Olver, S. (2014)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*. :arXiv:`1410.5286`.
.. [townsend.trogdon.olver-2015]
Townsend, A. and Trogdon, T. and Olver, S. (2015)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*.
IMA Journal of Numerical Analysis
:doi:`10.1093/imanum/drv002`.
"""
#
# Author: Travis Oliphant 2000
# Updated Sep. 2003 (fixed bugs --- tested to be accurate)
from __future__ import division, print_function, absolute_import
# Scipy imports.
import numpy as np
from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,
hstack, arccos, arange)
from scipy import linalg
from scipy.special import airy
# Local imports.
from . import _ufuncs as cephes
_gam = cephes.gamma
from . import specfun
_polyfuns = ['legendre', 'chebyt', 'chebyu', 'chebyc', 'chebys',
'jacobi', 'laguerre', 'genlaguerre', 'hermite',
'hermitenorm', 'gegenbauer', 'sh_legendre', 'sh_chebyt',
'sh_chebyu', 'sh_jacobi']
# Correspondence between new and old names of root functions
_rootfuns_map = {'roots_legendre': 'p_roots',
'roots_chebyt': 't_roots',
'roots_chebyu': 'u_roots',
'roots_chebyc': 'c_roots',
'roots_chebys': 's_roots',
'roots_jacobi': 'j_roots',
'roots_laguerre': 'l_roots',
'roots_genlaguerre': 'la_roots',
'roots_hermite': 'h_roots',
'roots_hermitenorm': 'he_roots',
'roots_gegenbauer': 'cg_roots',
'roots_sh_legendre': 'ps_roots',
'roots_sh_chebyt': 'ts_roots',
'roots_sh_chebyu': 'us_roots',
'roots_sh_jacobi': 'js_roots'}
_evalfuns = ['eval_legendre', 'eval_chebyt', 'eval_chebyu',
'eval_chebyc', 'eval_chebys', 'eval_jacobi',
'eval_laguerre', 'eval_genlaguerre', 'eval_hermite',
'eval_hermitenorm', 'eval_gegenbauer',
'eval_sh_legendre', 'eval_sh_chebyt', 'eval_sh_chebyu',
'eval_sh_jacobi']
__all__ = _polyfuns + list(_rootfuns_map.keys()) + _evalfuns + ['poch', 'binom']
class orthopoly1d(np.poly1d):
def __init__(self, roots, weights=None, hn=1.0, kn=1.0, wfunc=None,
limits=None, monic=False, eval_func=None):
equiv_weights = [weights[k] / wfunc(roots[k]) for
k in range(len(roots))]
mu = sqrt(hn)
if monic:
evf = eval_func
if evf:
eval_func = lambda x: evf(x) / kn
mu = mu / abs(kn)
kn = 1.0
# compute coefficients from roots, then scale
poly = np.poly1d(roots, r=True)
np.poly1d.__init__(self, poly.coeffs * float(kn))
# TODO: In numpy 1.13, there is no need to use __dict__ to access attributes
self.__dict__['weights'] = np.array(list(zip(roots,
weights, equiv_weights)))
self.__dict__['weight_func'] = wfunc
self.__dict__['limits'] = limits
self.__dict__['normcoef'] = mu
# Note: eval_func will be discarded on arithmetic
self.__dict__['_eval_func'] = eval_func
def __call__(self, v):
if self._eval_func and not isinstance(v, np.poly1d):
return self._eval_func(v)
else:
return np.poly1d.__call__(self, v)
def _scale(self, p):
if p == 1.0:
return
try:
self._coeffs
except AttributeError:
self.__dict__['coeffs'] *= p
else:
            # the coeffs attr is being made private in future versions of numpy
self._coeffs *= p
evf = self._eval_func
if evf:
self.__dict__['_eval_func'] = lambda x: evf(x) * p
self.__dict__['normcoef'] *= p
def _gen_roots_and_weights(n, mu0, an_func, bn_func, f, df, symmetrize, mu):
"""[x,w] = gen_roots_and_weights(n,an_func,sqrt_bn_func,mu)
Returns the roots (x) of an nth order orthogonal polynomial,
and weights (w) to use in appropriate Gaussian quadrature with that
orthogonal polynomial.
The polynomials have the recurrence relation
P_n+1(x) = (x - A_n) P_n(x) - B_n P_n-1(x)
an_func(n) should return A_n
sqrt_bn_func(n) should return sqrt(B_n)
mu ( = h_0 ) is the integral of the weight over the orthogonal
interval
"""
k = np.arange(n, dtype='d')
c = np.zeros((2, n))
c[0,1:] = bn_func(k[1:])
c[1,:] = an_func(k)
x = linalg.eigvals_banded(c, overwrite_a_band=True)
# improve roots by one application of Newton's method
y = f(n, x)
dy = df(n, x)
x -= y/dy
fm = f(n-1, x)
fm /= np.abs(fm).max()
dy /= np.abs(dy).max()
w = 1.0 / (fm * dy)
if symmetrize:
w = (w + w[::-1]) / 2
x = (x - x[::-1]) / 2
w *= mu0 / w.sum()
if mu:
return x, w, mu0
else:
return x, w
# Jacobi Polynomials 1 P^(alpha,beta)_n(x)
def roots_jacobi(n, alpha, beta, mu=False):
r"""Gauss-Jacobi quadrature.
Computes the sample points and weights for Gauss-Jacobi quadrature. The
sample points are the roots of the n-th degree Jacobi polynomial,
:math:`P^{\alpha, \beta}_n(x)`. These sample points and weights
correctly integrate polynomials of degree :math:`2n - 1` or less over the
interval :math:`[-1, 1]` with weight function
:math:`f(x) = (1 - x)^{\alpha} (1 + x)^{\beta}`.
Parameters
----------
n : int
quadrature order
alpha : float
alpha must be > -1
beta : float
        beta must be > -1
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
if alpha <= -1 or beta <= -1:
raise ValueError("alpha and beta must be greater than -1.")
if alpha == 0.0 and beta == 0.0:
return roots_legendre(m, mu)
if alpha == beta:
return roots_gegenbauer(m, alpha+0.5, mu)
mu0 = 2.0**(alpha+beta+1)*cephes.beta(alpha+1, beta+1)
a = alpha
b = beta
if a + b == 0.0:
an_func = lambda k: np.where(k == 0, (b-a)/(2+a+b), 0.0)
else:
an_func = lambda k: np.where(k == 0, (b-a)/(2+a+b),
(b*b - a*a) / ((2.0*k+a+b)*(2.0*k+a+b+2)))
bn_func = lambda k: 2.0 / (2.0*k+a+b)*np.sqrt((k+a)*(k+b) / (2*k+a+b+1)) \
* np.where(k == 1, 1.0, np.sqrt(k*(k+a+b) / (2.0*k+a+b-1)))
f = lambda n, x: cephes.eval_jacobi(n, a, b, x)
df = lambda n, x: 0.5 * (n + a + b + 1) \
* cephes.eval_jacobi(n-1, a+1, b+1, x)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, False, mu)
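# Illustrative check (a sketch, assuming this module is exposed through
# scipy.special): a 3-point Gauss-Jacobi rule with alpha=1, beta=0 integrates
# x**2 against the weight (1 - x) exactly, since int_{-1}^{1} x**2 (1 - x) dx = 2/3.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_jacobi
#     >>> x, w = roots_jacobi(3, 1.0, 0.0)
#     >>> np.allclose(np.sum(w * x**2), 2.0 / 3.0)
#     True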
def jacobi(n, alpha, beta, monic=False):
r"""Jacobi polynomial.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}P_n^{(\alpha, \beta)}
+ (\beta - \alpha - (\alpha + \beta + 2)x)
\frac{d}{dx}P_n^{(\alpha, \beta)}
+ n(n + \alpha + \beta + 1)P_n^{(\alpha, \beta)} = 0
for :math:`\alpha, \beta > -1`; :math:`P_n^{(\alpha, \beta)}` is a
polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
alpha : float
Parameter, must be greater than -1.
beta : float
Parameter, must be greater than -1.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
P : orthopoly1d
Jacobi polynomial.
Notes
-----
For fixed :math:`\alpha, \beta`, the polynomials
:math:`P_n^{(\alpha, \beta)}` are orthogonal over :math:`[-1, 1]`
with weight function :math:`(1 - x)^\alpha(1 + x)^\beta`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: (1 - x)**alpha * (1 + x)**beta
if n == 0:
return orthopoly1d([], [], 1.0, 1.0, wfunc, (-1, 1), monic,
eval_func=np.ones_like)
x, w, mu = roots_jacobi(n, alpha, beta, mu=True)
ab1 = alpha + beta + 1.0
hn = 2**ab1 / (2 * n + ab1) * _gam(n + alpha + 1)
hn *= _gam(n + beta + 1.0) / _gam(n + 1) / _gam(n + ab1)
kn = _gam(2 * n + ab1) / 2.0**n / _gam(n + 1) / _gam(n + ab1)
# here kn = coefficient on x^n term
p = orthopoly1d(x, w, hn, kn, wfunc, (-1, 1), monic,
lambda x: eval_jacobi(n, alpha, beta, x))
return p
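# Illustrative check (a sketch, assuming scipy.special exposes jacobi): the
# identity P_n^{(alpha, beta)}(1) = binom(n + alpha, n) gives
# P_4^{(2, 1)}(1) = binom(6, 4) = 15.
#
#     >>> import numpy as np
#     >>> from scipy.special import jacobi
#     >>> np.allclose(jacobi(4, 2.0, 1.0)(1.0), 15.0)
#     True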
# Jacobi Polynomials shifted G_n(p,q,x)
def roots_sh_jacobi(n, p1, q1, mu=False):
"""Gauss-Jacobi (shifted) quadrature.
Computes the sample points and weights for Gauss-Jacobi (shifted)
quadrature. The sample points are the roots of the n-th degree shifted
Jacobi polynomial, :math:`G^{p,q}_n(x)`. These sample points and weights
correctly integrate polynomials of degree :math:`2n - 1` or less over the
interval :math:`[0, 1]` with weight function
:math:`f(x) = (1 - x)^{p-q} x^{q-1}`
Parameters
----------
n : int
quadrature order
p1 : float
(p1 - q1) must be > -1
q1 : float
q1 must be > 0
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
if (p1-q1) <= -1 or q1 <= 0:
raise ValueError("(p - q) must be greater than -1, and q must be greater than 0.")
x, w, m = roots_jacobi(n, p1-q1, q1-1, True)
x = (x + 1) / 2
scale = 2.0**p1
w /= scale
m /= scale
if mu:
return x, w, m
else:
return x, w
def sh_jacobi(n, p, q, monic=False):
r"""Shifted Jacobi polynomial.
Defined by
.. math::
G_n^{(p, q)}(x)
= \binom{2n + p - 1}{n}^{-1}P_n^{(p - q, q - 1)}(2x - 1),
where :math:`P_n^{(\cdot, \cdot)}` is the nth Jacobi polynomial.
Parameters
----------
n : int
Degree of the polynomial.
p : float
Parameter, must have :math:`p > q - 1`.
q : float
Parameter, must be greater than 0.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
G : orthopoly1d
Shifted Jacobi polynomial.
Notes
-----
For fixed :math:`p, q`, the polynomials :math:`G_n^{(p, q)}` are
orthogonal over :math:`[0, 1]` with weight function :math:`(1 -
x)^{p - q}x^{q - 1}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: (1.0 - x)**(p - q) * (x)**(q - 1.)
if n == 0:
return orthopoly1d([], [], 1.0, 1.0, wfunc, (-1, 1), monic,
eval_func=np.ones_like)
n1 = n
x, w, mu0 = roots_sh_jacobi(n1, p, q, mu=True)
hn = _gam(n + 1) * _gam(n + q) * _gam(n + p) * _gam(n + p - q + 1)
hn /= (2 * n + p) * (_gam(2 * n + p)**2)
# kn = 1.0 in standard form so monic is redundant. Kept for compatibility.
kn = 1.0
pp = orthopoly1d(x, w, hn, kn, wfunc=wfunc, limits=(0, 1), monic=monic,
eval_func=lambda x: eval_sh_jacobi(n, p, q, x))
return pp
# Generalized Laguerre L^(alpha)_n(x)
def roots_genlaguerre(n, alpha, mu=False):
r"""Gauss-generalized Laguerre quadrature.
Computes the sample points and weights for Gauss-generalized Laguerre
quadrature. The sample points are the roots of the n-th degree generalized
Laguerre polynomial, :math:`L^{\alpha}_n(x)`. These sample points and
weights correctly integrate polynomials of degree :math:`2n - 1` or less
over the interval :math:`[0, \infty]` with weight function
:math:`f(x) = x^{\alpha} e^{-x}`.
Parameters
----------
n : int
quadrature order
alpha : float
alpha must be > -1
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
if alpha < -1:
raise ValueError("alpha must be greater than -1.")
mu0 = cephes.gamma(alpha + 1)
if m == 1:
x = np.array([alpha+1.0], 'd')
w = np.array([mu0], 'd')
if mu:
return x, w, mu0
else:
return x, w
an_func = lambda k: 2 * k + alpha + 1
bn_func = lambda k: -np.sqrt(k * (k + alpha))
f = lambda n, x: cephes.eval_genlaguerre(n, alpha, x)
df = lambda n, x: (n*cephes.eval_genlaguerre(n, alpha, x)
- (n + alpha)*cephes.eval_genlaguerre(n-1, alpha, x))/x
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, False, mu)
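# Illustrative check (a sketch, assuming scipy.special exposes
# roots_genlaguerre): with alpha=1 the weight is x*exp(-x) on [0, inf), so a
# 3-point rule reproduces int_0^inf x**2 * x * exp(-x) dx = gamma(4) = 6.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_genlaguerre
#     >>> x, w = roots_genlaguerre(3, 1.0)
#     >>> np.allclose(np.sum(w * x**2), 6.0)
#     True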
def genlaguerre(n, alpha, monic=False):
r"""Generalized (associated) Laguerre polynomial.
Defined to be the solution of
.. math::
x\frac{d^2}{dx^2}L_n^{(\alpha)}
+ (\alpha + 1 - x)\frac{d}{dx}L_n^{(\alpha)}
+ nL_n^{(\alpha)} = 0,
where :math:`\alpha > -1`; :math:`L_n^{(\alpha)}` is a polynomial
of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
alpha : float
Parameter, must be greater than -1.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
L : orthopoly1d
Generalized Laguerre polynomial.
Notes
-----
For fixed :math:`\alpha`, the polynomials :math:`L_n^{(\alpha)}`
are orthogonal over :math:`[0, \infty)` with weight function
:math:`e^{-x}x^\alpha`.
The Laguerre polynomials are the special case where :math:`\alpha
= 0`.
See Also
--------
laguerre : Laguerre polynomial.
"""
if alpha <= -1:
raise ValueError("alpha must be > -1")
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_genlaguerre(n1, alpha, mu=True)
wfunc = lambda x: exp(-x) * x**alpha
if n == 0:
x, w = [], []
hn = _gam(n + alpha + 1) / _gam(n + 1)
kn = (-1)**n / _gam(n + 1)
p = orthopoly1d(x, w, hn, kn, wfunc, (0, inf), monic,
lambda x: eval_genlaguerre(n, alpha, x))
return p
# Laguerre L_n(x)
def roots_laguerre(n, mu=False):
r"""Gauss-Laguerre quadrature.
Computes the sample points and weights for Gauss-Laguerre quadrature.
The sample points are the roots of the n-th degree Laguerre polynomial,
:math:`L_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[0, \infty]` with weight function :math:`f(x) = e^{-x}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.laguerre.laggauss
"""
return roots_genlaguerre(n, 0.0, mu=mu)
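# Illustrative check (a sketch, assuming scipy.special exposes roots_laguerre):
# a 2-point rule is exact for polynomials up to degree 3, so it reproduces
# int_0^inf x**3 * exp(-x) dx = gamma(4) = 6.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_laguerre
#     >>> x, w = roots_laguerre(2)
#     >>> np.allclose(np.sum(w * x**3), 6.0)
#     True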
def laguerre(n, monic=False):
r"""Laguerre polynomial.
Defined to be the solution of
.. math::
x\frac{d^2}{dx^2}L_n + (1 - x)\frac{d}{dx}L_n + nL_n = 0;
:math:`L_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
L : orthopoly1d
Laguerre Polynomial.
Notes
-----
The polynomials :math:`L_n` are orthogonal over :math:`[0,
\infty)` with weight function :math:`e^{-x}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_laguerre(n1, mu=True)
if n == 0:
x, w = [], []
hn = 1.0
kn = (-1)**n / _gam(n + 1)
p = orthopoly1d(x, w, hn, kn, lambda x: exp(-x), (0, inf), monic,
lambda x: eval_laguerre(n, x))
return p
# Hermite 1 H_n(x)
def roots_hermite(n, mu=False):
r"""Gauss-Hermite (physicst's) quadrature.
Computes the sample points and weights for Gauss-Hermite quadrature.
The sample points are the roots of the n-th degree Hermite polynomial,
:math:`H_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-\infty, \infty]` with weight function :math:`f(x) = e^{-x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
Notes
-----
For small n up to 150 a modified version of the Golub-Welsch
algorithm is used. Nodes are computed from the eigenvalue
problem and improved by one step of a Newton iteration.
The weights are computed from the well-known analytical formula.
For n larger than 150 an optimal asymptotic algorithm is applied
which computes nodes and weights in a numerically stable manner.
The algorithm has linear runtime making computation for very
large n (several thousand or more) feasible.
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.hermite.hermgauss
roots_hermitenorm
References
----------
.. [townsend.trogdon.olver-2014]
Townsend, A. and Trogdon, T. and Olver, S. (2014)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*. :arXiv:`1410.5286`.
.. [townsend.trogdon.olver-2015]
Townsend, A. and Trogdon, T. and Olver, S. (2015)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*.
IMA Journal of Numerical Analysis
:doi:`10.1093/imanum/drv002`.
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
mu0 = np.sqrt(np.pi)
if n <= 150:
an_func = lambda k: 0.0*k
bn_func = lambda k: np.sqrt(k/2.0)
f = cephes.eval_hermite
df = lambda n, x: 2.0 * n * cephes.eval_hermite(n-1, x)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
else:
nodes, weights = _roots_hermite_asy(m)
if mu:
return nodes, weights, mu0
else:
return nodes, weights
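# Illustrative check (a sketch, assuming scipy.special exposes roots_hermite):
# a 5-point rule reproduces int_{-inf}^{inf} x**2 * exp(-x**2) dx = sqrt(pi)/2,
# and the small-n branch agrees with numpy.polynomial.hermite.hermgauss.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_hermite
#     >>> x, w = roots_hermite(5)
#     >>> np.allclose(np.sum(w * x**2), np.sqrt(np.pi) / 2)
#     True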
def _compute_tauk(n, k, maxit=5):
"""Helper function for Tricomi initial guesses
For details, see formula 3.1 in lemma 3.1 in the
original paper.
Parameters
----------
n : int
Quadrature order
k : ndarray of type int
Index of roots :math:`\tau_k` to compute
maxit : int
Number of Newton maxit performed, the default
value of 5 is sufficient.
Returns
-------
tauk : ndarray
Roots of equation 3.1
See Also
--------
initial_nodes_a
roots_hermite_asy
"""
a = n % 2 - 0.5
c = (4.0*floor(n/2.0) - 4.0*k + 3.0)*pi / (4.0*floor(n/2.0) + 2.0*a + 2.0)
f = lambda x: x - sin(x) - c
df = lambda x: 1.0 - cos(x)
xi = 0.5*pi
for i in range(maxit):
xi = xi - f(xi)/df(xi)
return xi
def _initial_nodes_a(n, k):
r"""Tricomi initial guesses
Computes an initial approximation to the square of the `k`-th
(positive) root :math:`x_k` of the Hermite polynomial :math:`H_n`
of order :math:`n`. The formula is the one from lemma 3.1 in the
original paper. The guesses are accurate except in the region
near :math:`\sqrt{2n + 1}`.
Parameters
----------
n : int
Quadrature order
k : ndarray of type int
Index of roots to compute
Returns
-------
xksq : ndarray
Square of the approximate roots
See Also
--------
initial_nodes
roots_hermite_asy
"""
tauk = _compute_tauk(n, k)
sigk = cos(0.5*tauk)**2
a = n % 2 - 0.5
nu = 4.0*floor(n/2.0) + 2.0*a + 2.0
# Initial approximation of Hermite roots (square)
xksq = nu*sigk - 1.0/(3.0*nu) * (5.0/(4.0*(1.0-sigk)**2) - 1.0/(1.0-sigk) - 0.25)
return xksq
def _initial_nodes_b(n, k):
r"""Gatteschi initial guesses
Computes an initial approximation to the square of the `k`-th
(positive) root :math:`x_k` of the Hermite polynomial :math:`H_n`
of order :math:`n`. The formula is the one from lemma 3.2 in the
original paper. The guesses are accurate in the region just
below :math:`\sqrt{2n + 1}`.
Parameters
----------
n : int
Quadrature order
k : ndarray of type int
Index of roots to compute
Returns
-------
xksq : ndarray
Square of the approximate root
See Also
--------
initial_nodes
roots_hermite_asy
"""
a = n % 2 - 0.5
nu = 4.0*floor(n/2.0) + 2.0*a + 2.0
# Airy roots by approximation
ak = specfun.airyzo(k.max(), 1)[0][::-1]
# Initial approximation of Hermite roots (square)
xksq = (nu +
2.0**(2.0/3.0) * ak * nu**(1.0/3.0) +
1.0/5.0 * 2.0**(4.0/3.0) * ak**2 * nu**(-1.0/3.0) +
(9.0/140.0 - 12.0/175.0 * ak**3) * nu**(-1.0) +
(16.0/1575.0 * ak + 92.0/7875.0 * ak**4) * 2.0**(2.0/3.0) * nu**(-5.0/3.0) -
(15152.0/3031875.0 * ak**5 + 1088.0/121275.0 * ak**2) * 2.0**(1.0/3.0) * nu**(-7.0/3.0))
return xksq
def _initial_nodes(n):
"""Initial guesses for the Hermite roots
Computes an initial approximation to the non-negative
roots :math:`x_k` of the Hermite polynomial :math:`H_n`
of order :math:`n`. The Tricomi and Gatteschi initial
guesses are used in the region where they are accurate.
Parameters
----------
n : int
Quadrature order
Returns
-------
xk : ndarray
Approximate roots
See Also
--------
roots_hermite_asy
"""
# Turnover point
# linear polynomial fit to error of 10, 25, 40, ..., 1000 point rules
fit = 0.49082003*n - 4.37859653
turnover = around(fit).astype(int)
# Compute all approximations
ia = arange(1, int(floor(n*0.5)+1))
ib = ia[::-1]
xasq = _initial_nodes_a(n, ia[:turnover+1])
xbsq = _initial_nodes_b(n, ib[turnover+1:])
# Combine
iv = sqrt(hstack([xasq, xbsq]))
# Central node is always zero
if n % 2 == 1:
iv = hstack([0.0, iv])
return iv
def _pbcf(n, theta):
r"""Asymptotic series expansion of parabolic cylinder function
The implementation is based on sections 3.2 and 3.3 from the
original paper. Compared to the published version this code
adds one more term to the asymptotic series. The detailed
formulas can be found at [parabolic-asymptotics]_. The evaluation
is done in a transformed variable :math:`\theta := \arccos(t)`
where :math:`t := x / \mu` and :math:`\mu := \sqrt{2n + 1}`.
Parameters
----------
n : int
Quadrature order
theta : ndarray
Transformed position variable
Returns
-------
U : ndarray
Value of the parabolic cylinder function :math:`U(a, \theta)`.
Ud : ndarray
Value of the derivative :math:`U^{\prime}(a, \theta)` of
the parabolic cylinder function.
See Also
--------
roots_hermite_asy
References
----------
.. [parabolic-asymptotics]
http://dlmf.nist.gov/12.10#vii
"""
st = sin(theta)
ct = cos(theta)
# http://dlmf.nist.gov/12.10#vii
mu = 2.0*n + 1.0
# http://dlmf.nist.gov/12.10#E23
eta = 0.5*theta - 0.5*st*ct
# http://dlmf.nist.gov/12.10#E39
zeta = -(3.0*eta/2.0) ** (2.0/3.0)
# http://dlmf.nist.gov/12.10#E40
phi = (-zeta / st**2) ** (0.25)
# Coefficients
# http://dlmf.nist.gov/12.10#E43
a0 = 1.0
a1 = 0.10416666666666666667
a2 = 0.08355034722222222222
a3 = 0.12822657455632716049
a4 = 0.29184902646414046425
a5 = 0.88162726744375765242
b0 = 1.0
b1 = -0.14583333333333333333
b2 = -0.09874131944444444444
b3 = -0.14331205391589506173
b4 = -0.31722720267841354810
b5 = -0.94242914795712024914
# Polynomials
# http://dlmf.nist.gov/12.10#E9
# http://dlmf.nist.gov/12.10#E10
ctp = ct ** arange(16).reshape((-1,1))
u0 = 1.0
u1 = (1.0*ctp[3,:] - 6.0*ct) / 24.0
u2 = (-9.0*ctp[4,:] + 249.0*ctp[2,:] + 145.0) / 1152.0
u3 = (-4042.0*ctp[9,:] + 18189.0*ctp[7,:] - 28287.0*ctp[5,:] - 151995.0*ctp[3,:] - 259290.0*ct) / 414720.0
u4 = (72756.0*ctp[10,:] - 321339.0*ctp[8,:] - 154982.0*ctp[6,:] + 50938215.0*ctp[4,:] + 122602962.0*ctp[2,:] + 12773113.0) / 39813120.0
u5 = (82393456.0*ctp[15,:] - 617950920.0*ctp[13,:] + 1994971575.0*ctp[11,:] - 3630137104.0*ctp[9,:] + 4433574213.0*ctp[7,:]
- 37370295816.0*ctp[5,:] - 119582875013.0*ctp[3,:] - 34009066266.0*ct) / 6688604160.0
v0 = 1.0
v1 = (1.0*ctp[3,:] + 6.0*ct) / 24.0
v2 = (15.0*ctp[4,:] - 327.0*ctp[2,:] - 143.0) / 1152.0
v3 = (-4042.0*ctp[9,:] + 18189.0*ctp[7,:] - 36387.0*ctp[5,:] + 238425.0*ctp[3,:] + 259290.0*ct) / 414720.0
v4 = (-121260.0*ctp[10,:] + 551733.0*ctp[8,:] - 151958.0*ctp[6,:] - 57484425.0*ctp[4,:] - 132752238.0*ctp[2,:] - 12118727) / 39813120.0
v5 = (82393456.0*ctp[15,:] - 617950920.0*ctp[13,:] + 2025529095.0*ctp[11,:] - 3750839308.0*ctp[9,:] + 3832454253.0*ctp[7,:]
+ 35213253348.0*ctp[5,:] + 130919230435.0*ctp[3,:] + 34009066266*ct) / 6688604160.0
# Airy Evaluation (Bi and Bip unused)
Ai, Aip, Bi, Bip = airy(mu**(4.0/6.0) * zeta)
# Prefactor for U
P = 2.0*sqrt(pi) * mu**(1.0/6.0) * phi
# Terms for U
# http://dlmf.nist.gov/12.10#E42
phip = phi ** arange(6, 31, 6).reshape((-1,1))
A0 = b0*u0
A1 = (b2*u0 + phip[0,:]*b1*u1 + phip[1,:]*b0*u2) / zeta**3
A2 = (b4*u0 + phip[0,:]*b3*u1 + phip[1,:]*b2*u2 + phip[2,:]*b1*u3 + phip[3,:]*b0*u4) / zeta**6
B0 = -(a1*u0 + phip[0,:]*a0*u1) / zeta**2
B1 = -(a3*u0 + phip[0,:]*a2*u1 + phip[1,:]*a1*u2 + phip[2,:]*a0*u3) / zeta**5
B2 = -(a5*u0 + phip[0,:]*a4*u1 + phip[1,:]*a3*u2 + phip[2,:]*a2*u3 + phip[3,:]*a1*u4 + phip[4,:]*a0*u5) / zeta**8
# U
# http://dlmf.nist.gov/12.10#E35
U = P * (Ai * (A0 + A1/mu**2.0 + A2/mu**4.0) +
Aip * (B0 + B1/mu**2.0 + B2/mu**4.0) / mu**(8.0/6.0))
# Prefactor for derivative of U
Pd = sqrt(2.0*pi) * mu**(2.0/6.0) / phi
# Terms for derivative of U
# http://dlmf.nist.gov/12.10#E46
C0 = -(b1*v0 + phip[0,:]*b0*v1) / zeta
C1 = -(b3*v0 + phip[0,:]*b2*v1 + phip[1,:]*b1*v2 + phip[2,:]*b0*v3) / zeta**4
C2 = -(b5*v0 + phip[0,:]*b4*v1 + phip[1,:]*b3*v2 + phip[2,:]*b2*v3 + phip[3,:]*b1*v4 + phip[4,:]*b0*v5) / zeta**7
D0 = a0*v0
D1 = (a2*v0 + phip[0,:]*a1*v1 + phip[1,:]*a0*v2) / zeta**3
D2 = (a4*v0 + phip[0,:]*a3*v1 + phip[1,:]*a2*v2 + phip[2,:]*a1*v3 + phip[3,:]*a0*v4) / zeta**6
# Derivative of U
# http://dlmf.nist.gov/12.10#E36
Ud = Pd * (Ai * (C0 + C1/mu**2.0 + C2/mu**4.0) / mu**(4.0/6.0) +
Aip * (D0 + D1/mu**2.0 + D2/mu**4.0))
return U, Ud
def _newton(n, x_initial, maxit=5):
"""Newton iteration for polishing the asymptotic approximation
to the zeros of the Hermite polynomials.
Parameters
----------
n : int
Quadrature order
x_initial : ndarray
Initial guesses for the roots
maxit : int
Maximal number of Newton iterations.
The default 5 is sufficient, usually
only one or two steps are needed.
Returns
-------
nodes : ndarray
Quadrature nodes
weights : ndarray
Quadrature weights
See Also
--------
roots_hermite_asy
"""
# Variable transformation
mu = sqrt(2.0*n + 1.0)
t = x_initial / mu
theta = arccos(t)
# Newton iteration
for i in range(maxit):
u, ud = _pbcf(n, theta)
dtheta = u / (sqrt(2.0) * mu * sin(theta) * ud)
theta = theta + dtheta
if max(abs(dtheta)) < 1e-14:
break
# Undo variable transformation
x = mu * cos(theta)
# Central node is always zero
if n % 2 == 1:
x[0] = 0.0
# Compute weights
w = exp(-x**2) / (2.0*ud**2)
return x, w
def _roots_hermite_asy(n):
r"""Gauss-Hermite (physicst's) quadrature for large n.
Computes the sample points and weights for Gauss-Hermite quadrature.
The sample points are the roots of the n-th degree Hermite polynomial,
:math:`H_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-\infty, \infty]` with weight function :math:`f(x) = e^{-x^2}`.
This method relies on asymptotic expansions which work best for n > 150.
The algorithm has linear runtime making computation for very large n
feasible.
Parameters
----------
n : int
quadrature order
Returns
-------
nodes : ndarray
Quadrature nodes
weights : ndarray
Quadrature weights
See Also
--------
roots_hermite
References
----------
.. [townsend.trogdon.olver-2014]
Townsend, A. and Trogdon, T. and Olver, S. (2014)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*. :arXiv:`1410.5286`.
.. [townsend.trogdon.olver-2015]
Townsend, A. and Trogdon, T. and Olver, S. (2015)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*.
IMA Journal of Numerical Analysis
:doi:`10.1093/imanum/drv002`.
"""
iv = _initial_nodes(n)
nodes, weights = _newton(n, iv)
# Combine with negative parts
if n % 2 == 0:
nodes = hstack([-nodes[::-1], nodes])
weights = hstack([weights[::-1], weights])
else:
nodes = hstack([-nodes[-1:0:-1], nodes])
weights = hstack([weights[-1:0:-1], weights])
# Scale weights
weights *= sqrt(pi) / sum(weights)
return nodes, weights
def hermite(n, monic=False):
r"""Physicist's Hermite polynomial.
Defined by
.. math::
H_n(x) = (-1)^ne^{x^2}\frac{d^n}{dx^n}e^{-x^2};
:math:`H_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
H : orthopoly1d
Hermite polynomial.
Notes
-----
The polynomials :math:`H_n` are orthogonal over :math:`(-\infty,
\infty)` with weight function :math:`e^{-x^2}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_hermite(n1, mu=True)
wfunc = lambda x: exp(-x * x)
if n == 0:
x, w = [], []
hn = 2**n * _gam(n + 1) * sqrt(pi)
kn = 2**n
p = orthopoly1d(x, w, hn, kn, wfunc, (-inf, inf), monic,
lambda x: eval_hermite(n, x))
return p
# Hermite 2 He_n(x)
def roots_hermitenorm(n, mu=False):
r"""Gauss-Hermite (statistician's) quadrature.
Computes the sample points and weights for Gauss-Hermite quadrature.
The sample points are the roots of the n-th degree Hermite polynomial,
:math:`He_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-\infty, \infty]` with weight function :math:`f(x) = e^{-x^2/2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
Notes
-----
For small n up to 150 a modified version of the Golub-Welsch
algorithm is used. Nodes are computed from the eigenvalue
problem and improved by one step of a Newton iteration.
The weights are computed from the well-known analytical formula.
For n larger than 150 an optimal asymptotic algorithm is used
which computes nodes and weights in a numerically stable manner.
The algorithm has linear runtime making computation for very
large n (several thousand or more) feasible.
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.hermite_e.hermegauss
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
mu0 = np.sqrt(2.0*np.pi)
if n <= 150:
an_func = lambda k: 0.0*k
bn_func = lambda k: np.sqrt(k)
f = cephes.eval_hermitenorm
df = lambda n, x: n * cephes.eval_hermitenorm(n-1, x)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
else:
nodes, weights = _roots_hermite_asy(m)
# Transform
nodes *= sqrt(2)
weights *= sqrt(2)
if mu:
return nodes, weights, mu0
else:
return nodes, weights
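# Illustrative check (a sketch, assuming scipy.special exposes
# roots_hermitenorm): a 4-point rule reproduces
# int_{-inf}^{inf} x**2 * exp(-x**2/2) dx = sqrt(2*pi).
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_hermitenorm
#     >>> x, w = roots_hermitenorm(4)
#     >>> np.allclose(np.sum(w * x**2), np.sqrt(2 * np.pi))
#     True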
def hermitenorm(n, monic=False):
r"""Normalized (probabilist's) Hermite polynomial.
Defined by
.. math::
He_n(x) = (-1)^ne^{x^2/2}\frac{d^n}{dx^n}e^{-x^2/2};
:math:`He_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
He : orthopoly1d
Hermite polynomial.
Notes
-----
The polynomials :math:`He_n` are orthogonal over :math:`(-\infty,
\infty)` with weight function :math:`e^{-x^2/2}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_hermitenorm(n1, mu=True)
wfunc = lambda x: exp(-x * x / 2.0)
if n == 0:
x, w = [], []
hn = sqrt(2 * pi) * _gam(n + 1)
kn = 1.0
p = orthopoly1d(x, w, hn, kn, wfunc=wfunc, limits=(-inf, inf), monic=monic,
eval_func=lambda x: eval_hermitenorm(n, x))
return p
# The remainder of the polynomials can be derived from the ones above.
# Ultraspherical (Gegenbauer) C^(alpha)_n(x)
def roots_gegenbauer(n, alpha, mu=False):
r"""Gauss-Gegenbauer quadrature.
Computes the sample points and weights for Gauss-Gegenbauer quadrature.
The sample points are the roots of the n-th degree Gegenbauer polynomial,
:math:`C^{\alpha}_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function
:math:`f(x) = (1 - x^2)^{\alpha - 1/2}`.
Parameters
----------
n : int
quadrature order
alpha : float
alpha must be > -0.5
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
if alpha < -0.5:
raise ValueError("alpha must be greater than -0.5.")
elif alpha == 0.0:
# C(n,0,x) == 0 uniformly, however, as alpha->0, C(n,alpha,x)->T(n,x)
# strictly, we should just error out here, since the roots are not
# really defined, but we used to return something useful, so let's
# keep doing so.
return roots_chebyt(n, mu)
mu0 = np.sqrt(np.pi) * cephes.gamma(alpha + 0.5) / cephes.gamma(alpha + 1)
an_func = lambda k: 0.0 * k
bn_func = lambda k: np.sqrt(k * (k + 2 * alpha - 1)
/ (4 * (k + alpha) * (k + alpha - 1)))
f = lambda n, x: cephes.eval_gegenbauer(n, alpha, x)
df = lambda n, x: (-n*x*cephes.eval_gegenbauer(n, alpha, x)
+ (n + 2*alpha - 1)*cephes.eval_gegenbauer(n-1, alpha, x))/(1-x**2)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
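# Illustrative check (a sketch, assuming scipy.special exposes
# roots_gegenbauer): with alpha=1 the weight is sqrt(1 - x**2), so a 4-point
# rule reproduces int_{-1}^{1} x**2 * sqrt(1 - x**2) dx = pi/8.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_gegenbauer
#     >>> x, w = roots_gegenbauer(4, 1.0)
#     >>> np.allclose(np.sum(w * x**2), np.pi / 8)
#     True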
def gegenbauer(n, alpha, monic=False):
r"""Gegenbauer (ultraspherical) polynomial.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}C_n^{(\alpha)}
- (2\alpha + 1)x\frac{d}{dx}C_n^{(\alpha)}
+ n(n + 2\alpha)C_n^{(\alpha)} = 0
for :math:`\alpha > -1/2`; :math:`C_n^{(\alpha)}` is a polynomial
of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
C : orthopoly1d
Gegenbauer polynomial.
Notes
-----
The polynomials :math:`C_n^{(\alpha)}` are orthogonal over
:math:`[-1,1]` with weight function :math:`(1 - x^2)^{(\alpha -
1/2)}`.
"""
base = jacobi(n, alpha - 0.5, alpha - 0.5, monic=monic)
if monic:
return base
# Abramowitz and Stegun 22.5.20
factor = (_gam(2*alpha + n) * _gam(alpha + 0.5) /
_gam(2*alpha) / _gam(alpha + 0.5 + n))
base._scale(factor)
base.__dict__['_eval_func'] = lambda x: eval_gegenbauer(float(n), alpha, x)
return base
# Chebyshev of the first kind: T_n(x) =
# n! sqrt(pi) / _gam(n+1./2)* P^(-1/2,-1/2)_n(x)
# Computed anew.
def roots_chebyt(n, mu=False):
r"""Gauss-Chebyshev (first kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the first kind, :math:`T_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function :math:`f(x) = 1/\sqrt{1 - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.chebyshev.chebgauss
"""
m = int(n)
if n < 1 or n != m:
raise ValueError('n must be a positive integer.')
x = np.cos(np.arange(2 * m - 1, 0, -2) * pi / (2 * m))
w = np.empty_like(x)
w.fill(pi/m)
if mu:
return x, w, pi
else:
return x, w
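# Illustrative check (a sketch, assuming scipy.special exposes roots_chebyt):
# the nodes are cos((2k - 1)*pi/(2n)) with equal weights pi/n, and a 3-point
# rule reproduces int_{-1}^{1} x**2 / sqrt(1 - x**2) dx = pi/2.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_chebyt
#     >>> x, w = roots_chebyt(3)
#     >>> np.allclose(np.sum(w * x**2), np.pi / 2)
#     True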
def chebyt(n, monic=False):
r"""Chebyshev polynomial of the first kind.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}T_n - x\frac{d}{dx}T_n + n^2T_n = 0;
:math:`T_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
T : orthopoly1d
Chebyshev polynomial of the first kind.
Notes
-----
The polynomials :math:`T_n` are orthogonal over :math:`[-1, 1]`
with weight function :math:`(1 - x^2)^{-1/2}`.
See Also
--------
chebyu : Chebyshev polynomial of the second kind.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: 1.0 / sqrt(1 - x * x)
if n == 0:
return orthopoly1d([], [], pi, 1.0, wfunc, (-1, 1), monic,
lambda x: eval_chebyt(n, x))
n1 = n
x, w, mu = roots_chebyt(n1, mu=True)
hn = pi / 2
kn = 2**(n - 1)
p = orthopoly1d(x, w, hn, kn, wfunc, (-1, 1), monic,
lambda x: eval_chebyt(n, x))
return p
# Chebyshev of the second kind
# U_n(x) = (n+1)! sqrt(pi) / (2*_gam(n+3./2)) * P^(1/2,1/2)_n(x)
def roots_chebyu(n, mu=False):
r"""Gauss-Chebyshev (second kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the second kind, :math:`U_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function :math:`f(x) = \sqrt{1 - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError('n must be a positive integer.')
t = np.arange(m, 0, -1) * pi / (m + 1)
x = np.cos(t)
w = pi * np.sin(t)**2 / (m + 1)
if mu:
return x, w, pi / 2
else:
return x, w
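# Illustrative check (a sketch, assuming scipy.special exposes roots_chebyu):
# the nodes are cos(k*pi/(n + 1)), and a 3-point rule (exact through degree 5)
# reproduces int_{-1}^{1} x**4 * sqrt(1 - x**2) dx = pi/16.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_chebyu
#     >>> x, w = roots_chebyu(3)
#     >>> np.allclose(np.sum(w * x**4), np.pi / 16)
#     True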
def chebyu(n, monic=False):
r"""Chebyshev polynomial of the second kind.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}U_n - 3x\frac{d}{dx}U_n
+ n(n + 2)U_n = 0;
:math:`U_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
U : orthopoly1d
Chebyshev polynomial of the second kind.
Notes
-----
The polynomials :math:`U_n` are orthogonal over :math:`[-1, 1]`
with weight function :math:`(1 - x^2)^{1/2}`.
See Also
--------
chebyt : Chebyshev polynomial of the first kind.
"""
base = jacobi(n, 0.5, 0.5, monic=monic)
if monic:
return base
factor = sqrt(pi) / 2.0 * _gam(n + 2) / _gam(n + 1.5)
base._scale(factor)
return base
# Chebyshev of the first kind C_n(x)
def roots_chebyc(n, mu=False):
r"""Gauss-Chebyshev (first kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the first kind, :math:`C_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-2, 2]` with weight function :math:`f(x) = 1/\sqrt{1 - (x/2)^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w, m = roots_chebyt(n, True)
x *= 2
w *= 2
m *= 2
if mu:
return x, w, m
else:
return x, w
def chebyc(n, monic=False):
r"""Chebyshev polynomial of the first kind on :math:`[-2, 2]`.
Defined as :math:`C_n(x) = 2T_n(x/2)`, where :math:`T_n` is the
nth Chebyshev polynomial of the first kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
C : orthopoly1d
Chebyshev polynomial of the first kind on :math:`[-2, 2]`.
Notes
-----
The polynomials :math:`C_n(x)` are orthogonal over :math:`[-2, 2]`
with weight function :math:`1/\sqrt{1 - (x/2)^2}`.
See Also
--------
chebyt : Chebyshev polynomial of the first kind.
References
----------
.. [1] Abramowitz and Stegun, "Handbook of Mathematical Functions"
Section 22. National Bureau of Standards, 1972.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_chebyc(n1, mu=True)
if n == 0:
x, w = [], []
hn = 4 * pi * ((n == 0) + 1)
kn = 1.0
p = orthopoly1d(x, w, hn, kn,
wfunc=lambda x: 1.0 / sqrt(1 - x * x / 4.0),
limits=(-2, 2), monic=monic)
if not monic:
p._scale(2.0 / p(2))
p.__dict__['_eval_func'] = lambda x: eval_chebyc(n, x)
return p
# Chebyshev of the second kind S_n(x)
def roots_chebys(n, mu=False):
r"""Gauss-Chebyshev (second kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the second kind, :math:`S_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-2, 2]` with weight function :math:`f(x) = \sqrt{1 - (x/2)^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w, m = roots_chebyu(n, True)
x *= 2
w *= 2
m *= 2
if mu:
return x, w, m
else:
return x, w
def chebys(n, monic=False):
r"""Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Defined as :math:`S_n(x) = U_n(x/2)` where :math:`U_n` is the
nth Chebyshev polynomial of the second kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
S : orthopoly1d
Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Notes
-----
The polynomials :math:`S_n(x)` are orthogonal over :math:`[-2, 2]`
with weight function :math:`\sqrt{1 - (x/2)^2}`.
See Also
--------
chebyu : Chebyshev polynomial of the second kind
References
----------
.. [1] Abramowitz and Stegun, "Handbook of Mathematical Functions"
Section 22. National Bureau of Standards, 1972.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_chebys(n1, mu=True)
if n == 0:
x, w = [], []
hn = pi
kn = 1.0
p = orthopoly1d(x, w, hn, kn,
wfunc=lambda x: sqrt(1 - x * x / 4.0),
limits=(-2, 2), monic=monic)
if not monic:
factor = (n + 1.0) / p(2)
p._scale(factor)
p.__dict__['_eval_func'] = lambda x: eval_chebys(n, x)
return p
# Shifted Chebyshev of the first kind T^*_n(x)
def roots_sh_chebyt(n, mu=False):
r"""Gauss-Chebyshev (first kind, shifted) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree shifted Chebyshev
polynomial of the first kind, :math:`T_n(x)`. These sample points and
weights correctly integrate polynomials of degree :math:`2n - 1` or less
over the interval :math:`[0, 1]` with weight function
:math:`f(x) = 1/\sqrt{x - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
xw = roots_chebyt(n, mu)
return ((xw[0] + 1) / 2,) + xw[1:]
def sh_chebyt(n, monic=False):
r"""Shifted Chebyshev polynomial of the first kind.
Defined as :math:`T^*_n(x) = T_n(2x - 1)` for :math:`T_n` the nth
Chebyshev polynomial of the first kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
T : orthopoly1d
Shifted Chebyshev polynomial of the first kind.
Notes
-----
The polynomials :math:`T^*_n` are orthogonal over :math:`[0, 1]`
with weight function :math:`(x - x^2)^{-1/2}`.
"""
base = sh_jacobi(n, 0.0, 0.5, monic=monic)
if monic:
return base
if n > 0:
factor = 4**n / 2.0
else:
factor = 1.0
base._scale(factor)
return base
# Shifted Chebyshev of the second kind U^*_n(x)
def roots_sh_chebyu(n, mu=False):
r"""Gauss-Chebyshev (second kind, shifted) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree shifted Chebyshev
polynomial of the second kind, :math:`U_n(x)`. These sample points and
weights correctly integrate polynomials of degree :math:`2n - 1` or less
over the interval :math:`[0, 1]` with weight function
:math:`f(x) = \sqrt{x - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w, m = roots_chebyu(n, True)
x = (x + 1) / 2
m_us = cephes.beta(1.5, 1.5)
w *= m_us / m
if mu:
return x, w, m_us
else:
return x, w
def sh_chebyu(n, monic=False):
r"""Shifted Chebyshev polynomial of the second kind.
Defined as :math:`U^*_n(x) = U_n(2x - 1)` for :math:`U_n` the nth
Chebyshev polynomial of the second kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
U : orthopoly1d
Shifted Chebyshev polynomial of the second kind.
Notes
-----
The polynomials :math:`U^*_n` are orthogonal over :math:`[0, 1]`
with weight function :math:`(x - x^2)^{1/2}`.
"""
base = sh_jacobi(n, 2.0, 1.5, monic=monic)
if monic:
return base
factor = 4**n
base._scale(factor)
return base
# Legendre
def roots_legendre(n, mu=False):
r"""Gauss-Legendre quadrature.
Computes the sample points and weights for Gauss-Legendre quadrature.
The sample points are the roots of the n-th degree Legendre polynomial
:math:`P_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function :math:`f(x) = 1.0`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.legendre.leggauss
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
mu0 = 2.0
an_func = lambda k: 0.0 * k
bn_func = lambda k: k * np.sqrt(1.0 / (4 * k * k - 1))
f = cephes.eval_legendre
df = lambda n, x: (-n*x*cephes.eval_legendre(n, x)
+ n*cephes.eval_legendre(n-1, x))/(1-x**2)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
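# Illustrative check (a sketch, assuming scipy.special exposes roots_legendre):
# a 4-point rule is exact through degree 7, so it reproduces
# int_{-1}^{1} x**6 dx = 2/7 and matches numpy.polynomial.legendre.leggauss.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_legendre
#     >>> x, w = roots_legendre(4)
#     >>> np.allclose(np.sum(w * x**6), 2.0 / 7.0)
#     True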
def legendre(n, monic=False):
r"""Legendre polynomial.
Defined to be the solution of
.. math::
\frac{d}{dx}\left[(1 - x^2)\frac{d}{dx}P_n(x)\right]
+ n(n + 1)P_n(x) = 0;
:math:`P_n(x)` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
P : orthopoly1d
Legendre polynomial.
Notes
-----
The polynomials :math:`P_n` are orthogonal over :math:`[-1, 1]`
with weight function 1.
Examples
--------
Generate the 3rd-order Legendre polynomial 1/2*(5x^3 + 0x^2 - 3x + 0):
>>> from scipy.special import legendre
>>> legendre(3)
poly1d([ 2.5, 0. , -1.5, 0. ])
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_legendre(n1, mu=True)
if n == 0:
x, w = [], []
hn = 2.0 / (2 * n + 1)
kn = _gam(2 * n + 1) / _gam(n + 1)**2 / 2.0**n
p = orthopoly1d(x, w, hn, kn, wfunc=lambda x: 1.0, limits=(-1, 1),
monic=monic, eval_func=lambda x: eval_legendre(n, x))
return p
# Shifted Legendre P^*_n(x)
def roots_sh_legendre(n, mu=False):
r"""Gauss-Legendre (shifted) quadrature.
Computes the sample points and weights for Gauss-Legendre quadrature.
The sample points are the roots of the n-th degree shifted Legendre
polynomial :math:`P^*_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[0, 1]` with weight function :math:`f(x) = 1.0`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w = roots_legendre(n)
x = (x + 1) / 2
w /= 2
if mu:
return x, w, 1.0
else:
return x, w
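# Illustrative check (a sketch, assuming scipy.special exposes
# roots_sh_legendre): the nodes are the Legendre nodes mapped to [0, 1], so a
# 2-point rule reproduces int_0^1 x**3 dx = 1/4.
#
#     >>> import numpy as np
#     >>> from scipy.special import roots_sh_legendre
#     >>> x, w = roots_sh_legendre(2)
#     >>> np.allclose(np.sum(w * x**3), 0.25)
#     True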
def sh_legendre(n, monic=False):
r"""Shifted Legendre polynomial.
Defined as :math:`P^*_n(x) = P_n(2x - 1)` for :math:`P_n` the nth
Legendre polynomial.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
P : orthopoly1d
Shifted Legendre polynomial.
Notes
-----
The polynomials :math:`P^*_n` are orthogonal over :math:`[0, 1]`
with weight function 1.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: 0.0 * x + 1.0
if n == 0:
return orthopoly1d([], [], 1.0, 1.0, wfunc, (0, 1), monic,
lambda x: eval_sh_legendre(n, x))
x, w, mu0 = roots_sh_legendre(n, mu=True)
hn = 1.0 / (2 * n + 1.0)
kn = _gam(2 * n + 1) / _gam(n + 1)**2
p = orthopoly1d(x, w, hn, kn, wfunc, limits=(0, 1), monic=monic,
eval_func=lambda x: eval_sh_legendre(n, x))
return p
# -----------------------------------------------------------------------------
# Code for backwards compatibility
# -----------------------------------------------------------------------------
# Import functions in case someone is still calling the orthogonal
# module directly. (They shouldn't be; it's not in the public API).
poch = cephes.poch
from ._ufuncs import (binom, eval_jacobi, eval_sh_jacobi, eval_gegenbauer,
eval_chebyt, eval_chebyu, eval_chebys, eval_chebyc,
eval_sh_chebyt, eval_sh_chebyu, eval_legendre,
eval_sh_legendre, eval_genlaguerre, eval_laguerre,
eval_hermite, eval_hermitenorm)
# Make the old root function names an alias for the new ones
_modattrs = globals()
for newfun, oldfun in _rootfuns_map.items():
_modattrs[oldfun] = _modattrs[newfun]
__all__.append(oldfun)
|
bsd-3-clause
| -6,653,320,370,332,630,000 | 27.296154 | 139 | 0.55544 | false | 3.099805 | false | false | false |
lobnek/pyutil
|
test/test_mongo/test_engine/test_strategy.py
|
1
|
3434
|
from pyutil.mongo.engine.strategy import Strategy, strategies, configuration
from pyutil.mongo.engine.symbol import Symbol, Group
from pyutil.performance.drawdown import drawdown
from pyutil.performance.month import monthlytable
from pyutil.performance.return_series import from_nav
from pyutil.portfolio.portfolio import similar
import pandas.testing as pt
from test.config import *
@pytest.fixture()
def group():
Group.objects.delete()
return Group(name="US Equity").save()
@pytest.fixture()
def symbols(group, portfolio):
Symbol.objects.delete()
# add the symbols to database
for symbol in portfolio.assets:
Symbol(name=symbol, group=group).save()
def test_strategy(symbols, portfolio):
Strategy.objects.delete()
s = Strategy(name="mdt", type="mdt", active=True, source="AAA")
assert s.source == "AAA"
assert s.type == "mdt"
assert s.active
assert s.portfolio is None
assert s.last_valid_index is None
# empty dictionary as portfolio hasn't been set
assert Strategy.portfolios(strategies=[s]) == {}
s.save()
frame = Strategy.reference_frame()
assert frame.index.name == "strategy"
s.portfolio = portfolio
pt.assert_frame_equal(s.portfolio.weights, portfolio.weights)
pt.assert_frame_equal(s.portfolio.prices, portfolio.prices)
s.save()
similar(Strategy.portfolios(strategies=[s])["mdt"], portfolio)
navs = Strategy.navs()
assert not navs["mdt"].empty
frame = Strategy.sectors(strategies=[s])
assert frame.index.name == "Portfolio"
assert set(frame.keys()) == {"US Equity", "Total"}
assert frame.loc["mdt"]["US Equity"] == pytest.approx(0.308974, abs=1e-5)
def test_source(portfolio):
with open(resource("source.py"), "r") as f:
s = Strategy(name="Peter", source=f.read(), active=True, type="wild")
# construct the configuration based on the strategy (and its source code)
c = configuration(strategy=s)
# verify the names of the configuration
assert c.names == portfolio.assets
# also possible to ask the strategy directly
assert s.assets == portfolio.assets
def test_last_valid(portfolio):
s = Strategy(name="Maffay", source="AAA", active=True, type="wild2")
s.portfolio = portfolio
assert s.last_valid_index == portfolio.prices.last_valid_index()
assert similar(s.portfolio, portfolio)
def test_strategies():
folder = resource(name="strat")
for name, source in strategies(folder=folder):
assert name in {"P1", "P2"}
def test_active():
Strategy.objects.delete()
Strategy(name="A", active=False).save()
Strategy(name="B", active=True).save()
assert len(Strategy.active_strategies()) == 1
assert len(Strategy.objects) == 2
def test_drawdown(portfolio):
Strategy.objects.delete()
s = Strategy(name="Maffay", source="")
s.portfolio = portfolio
pt.assert_series_equal(drawdown(portfolio.nav), s.drawdown)
def test_volatility(portfolio):
Strategy.objects.delete()
s = Strategy(name="Maffay", source="")
s.portfolio = portfolio
pt.assert_series_equal(from_nav(portfolio.nav).ewm_volatility().dropna(), s.ewm_volatility())
def test_monthlytable(portfolio):
Strategy.objects.delete()
s = Strategy(name="Maffay", source="")
s.portfolio = portfolio
pt.assert_frame_equal(monthlytable(portfolio.nav.pct_change()), s.monthlytable)
|
mit
| 5,867,971,628,889,860,000 | 29.39823 | 97 | 0.689866 | false | 3.437437 | true | false | false |
KlinkOnE/caf-port
|
scripts/gcc-wrapper.py
|
1
|
3550
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"alignment.c:720",
"async.c:122",
"async.c:270",
"dir.c:43",
"dm.c:1053",
"dm.c:1080",
"dm-table.c:1120",
"dm-table.c:1126",
"drm_edid.c:1303",
"eventpoll.c:1143",
"f_mass_storage.c:3368",
"inode.c:72",
"inode.c:73",
"inode.c:74",
"msm_sdcc.c:126",
"msm_sdcc.c:128",
"nf_conntrack_netlink.c:790",
"nf_nat_standalone.c:118",
"return_address.c:61",
"soc-core.c:1719",
"xt_log.h:50",
])
# Capture the name of the object file, so we can find it.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
def interpret_warning(line):
"""Decode the message from gcc. The messages we care about have a filename, and a warning"""
line = line.rstrip('\n')
m = warning_re.match(line)
# if m and m.group(2) not in allowed_warnings:
# print "error, forbidden warning:", m.group(2)
# If there is a warning, remove any object if it exists.
# if ofile:
# try:
# os.remove(ofile)
# except OSError:
# pass
# sys.exit(1)
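# Illustrative example (hypothetical warning line, not taken from a real
# build log) of what warning_re extracts:
#
#   drivers/gpu/msm/kgsl.c:123:45: warning: unused variable 'ret'
#
# matches with group(2) == "kgsl.c:123", the "file:line" key that is compared
# against the allowed_warnings whitelist above.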
def run_gcc():
args = sys.argv[1:]
# Look for -o
try:
i = args.index('-o')
global ofile
ofile = args[i+1]
except (ValueError, IndexError):
pass
compiler = sys.argv[0]
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
for line in proc.stderr:
print line,
interpret_warning(line)
result = proc.wait()
return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
|
gpl-2.0
| -2,623,976,439,378,806,300 | 32.17757 | 97 | 0.663099 | false | 3.626149 | false | false | false |
CanalTP/navitia
|
source/tyr/tests/integration/autocomplete_test.py
|
1
|
11540
|
# coding: utf-8
# Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, division, unicode_literals
from tests.check_utils import api_get, api_post, api_delete, api_put, _dt
import json
import pytest
import jmespath
from navitiacommon import models
from tyr import app
@pytest.fixture
def create_autocomplete_parameter():
with app.app_context():
autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9])
models.db.session.add(autocomplete_param)
models.db.session.commit()
# we also create 3 datasets, one for bano, 2 for osm
for i, dset_type in enumerate(['bano', 'osm', 'osm']):
job = models.Job()
dataset = models.DataSet()
dataset.type = dset_type
dataset.family_type = 'autocomplete_{}'.format(dataset.type)
dataset.name = '/path/to/dataset_{}'.format(i)
models.db.session.add(dataset)
job.autocomplete_params_id = autocomplete_param.id
job.data_sets.append(dataset)
job.state = 'done'
models.db.session.add(job)
models.db.session.commit()
@pytest.fixture
def create_two_autocomplete_parameters():
with app.app_context():
autocomplete_param1 = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9])
autocomplete_param2 = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9])
models.db.session.add(autocomplete_param1)
models.db.session.add(autocomplete_param2)
models.db.session.commit()
@pytest.fixture
def autocomplete_parameter_json():
return {
"name": "peru",
"street": "OSM",
"address": "BANO",
"poi": "FUSIO",
"admin": "OSM",
"admin_level": [8],
}
def test_get_autocomplete_parameters_empty():
resp = api_get('/v0/autocomplete_parameters/')
assert resp == []
def test_get_all_autocomplete(create_autocomplete_parameter):
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 1
assert resp[0]['name'] == 'idf'
assert resp[0]['street'] == 'OSM'
assert resp[0]['address'] == 'BANO'
assert resp[0]['poi'] == 'FUSIO'
assert resp[0]['admin'] == 'OSM'
assert resp[0]['admin_level'] == [8, 9]
assert not resp[0]['config_toml']
def test_get_autocomplete_by_name(create_two_autocomplete_parameters):
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 2
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
assert resp['street'] == 'OSM'
assert resp['address'] == 'OSM'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8, 9]
assert not resp['config_toml']
def test_post_autocomplete(autocomplete_parameter_json):
resp = api_post(
'/v0/autocomplete_parameters',
data=json.dumps(autocomplete_parameter_json),
content_type='application/json',
)
assert resp['name'] == 'peru'
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert not resp['config_toml']
def test_post_autocomplete_cosmo():
resp = api_post(
'/v0/autocomplete_parameters',
data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}),
content_type='application/json',
)
assert resp['name'] == 'bobette'
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'OSM'
assert resp['admin'] == 'COSMOGONY'
assert resp['admin_level'] == []
assert not resp['config_toml']
def test_put_autocomplete(create_two_autocomplete_parameters, autocomplete_parameter_json):
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
assert resp['street'] == 'OSM'
assert resp['address'] == 'OSM'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8, 9]
assert not resp['config_toml']
resp = api_put(
'/v0/autocomplete_parameters/france',
data=json.dumps(autocomplete_parameter_json),
content_type='application/json',
)
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert not resp['config_toml']
def test_create_autocomplete_with_config_toml():
json_with_config_toml = {
"name": "bobette",
"address": "BANO",
"admin": "OSM",
"admin_level": [8],
"config_toml": "dataset = \"bobette\"\n\n[admin]\nimport = true\ncity_level = 8\nlevels = [8]\n\n"
"[way]\nimport = true\n\n[poi]\nimport = true\n",
"poi": "OSM",
"street": "OSM",
}
resp = api_post(
'/v0/autocomplete_parameters', data=json.dumps(json_with_config_toml), content_type='application/json'
)
assert resp['name'] == json_with_config_toml["name"]
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'OSM'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert resp['config_toml'] == json_with_config_toml["config_toml"]
def test_put_autocomplete_with_config_toml_not_in_database():
json_with_config_toml = {
"name": "bobette",
"address": "BANO",
"admin": "OSM",
"admin_level": [8],
"config_toml": "dataset = \"bobette\"\n\n[admin]\nimport = true\ncity_level = 8\nlevels = [8]\n\n"
"[way]\nimport = true\n\n[poi]\nimport = true\n",
"poi": "OSM",
"street": "OSM",
}
resp, status_code = api_put(
'/v0/autocomplete_parameters/bobette',
data=json.dumps(json_with_config_toml),
content_type='application/json',
check=False,
)
assert status_code == 201
assert resp['name'] == json_with_config_toml["name"]
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'OSM'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert resp['config_toml'] == json_with_config_toml["config_toml"]
def test_delete_autocomplete(create_two_autocomplete_parameters):
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 2
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
_, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True)
assert status == 204
_, status = api_get('/v0/autocomplete_parameters/france', check=False)
assert status == 404
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 1
def test_get_last_datasets_autocomplete(create_autocomplete_parameter):
"""
    We query the loaded datasets of idf.
    Three datasets were loaded, but by default we should get one per family_type: one for bano and one for osm.
"""
resp = api_get('/v0/autocomplete_parameters/idf/last_datasets')
assert len(resp) == 2
bano = next((d for d in resp if d['type'] == 'bano'), None)
assert bano
assert bano['family_type'] == 'autocomplete_bano'
assert bano['name'] == '/path/to/dataset_0'
osm = next((d for d in resp if d['type'] == 'osm'), None)
assert osm
assert osm['family_type'] == 'autocomplete_osm'
assert osm['name'] == '/path/to/dataset_2' # we should have the last one
    # if we ask for the 2 last datasets per type, we get all of them
resp = api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2')
assert len(resp) == 3
@pytest.fixture
def minimal_poi_types_json():
return {
"poi_types": [
{"id": "amenity:bicycle_rental", "name": "Station VLS"},
{"id": "amenity:parking", "name": "Parking"},
],
"rules": [
{
"osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}],
"poi_type_id": "amenity:bicycle_rental",
},
{"osm_tags_filters": [{"key": "amenity", "value": "parking"}], "poi_type_id": "amenity:parking"},
],
}
def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json):
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
# POST a minimal conf
resp = api_post(
'/v0/autocomplete_parameters/france/poi_types',
data=json.dumps(minimal_poi_types_json),
content_type='application/json',
)
def test_minimal_conf(resp):
assert len(resp['poi_types']) == 2
assert len(resp['rules']) == 2
bss_type = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp)
assert len(bss_type) == 1
assert bss_type[0]['name'] == 'Station VLS'
bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp)
assert len(bss_rule) == 1
assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental'
# check that it's not the "default" conf
assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp)
# check that the conf is correctly set on france
test_minimal_conf(resp)
# check that the conf on europe is still empty
resp = api_get('/v0/autocomplete_parameters/europe/poi_types')
assert not resp
# check GET of newly defined france conf
resp = api_get('/v0/autocomplete_parameters/france/poi_types')
test_minimal_conf(resp)
# check DELETE of france conf
resp, code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True)
assert not resp
assert code == 204
# check get of conf on france is now empty
resp = api_get('/v0/autocomplete_parameters/france/poi_types')
assert not resp
# check that tyr refuses incorrect conf
resp, code = api_post(
'/v0/autocomplete_parameters/france/poi_types',
data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}),
content_type='application/json',
check=False,
)
assert code == 400
assert resp['status'] == 'error'
assert 'rules' in resp['message']
|
agpl-3.0
| 7,122,653,603,733,621,000 | 33.550898 | 110 | 0.62175 | false | 3.319908 | true | false | false |
samuelshaner/openmc
|
tests/run_tests.py
|
1
|
17900
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import shutil
import re
import glob
import socket
from subprocess import call, check_output
from collections import OrderedDict
from optparse import OptionParser
# Command line parsing
parser = OptionParser()
parser.add_option('-j', '--parallel', dest='n_procs', default='1',
help="Number of parallel jobs.")
parser.add_option('-R', '--tests-regex', dest='regex_tests',
help="Run tests matching regular expression. \
Test names are the directories present in tests folder.\
This uses standard regex syntax to select tests.")
parser.add_option('-C', '--build-config', dest='build_config',
help="Build configurations matching regular expression. \
Specific build configurations can be printed out with \
optional argument -p, --print. This uses standard \
regex syntax to select build configurations.")
parser.add_option('-l', '--list', action="store_true",
dest="list_build_configs", default=False,
help="List out build configurations.")
parser.add_option("-p", "--project", dest="project", default="",
help="project name for build")
parser.add_option("-D", "--dashboard", dest="dash",
help="Dash name -- Experimental, Nightly, Continuous")
parser.add_option("-u", "--update", action="store_true", dest="update",
help="Allow CTest to update repo. (WARNING: may overwrite\
changes that were not pushed.")
parser.add_option("-s", "--script", action="store_true", dest="script",
help="Activate CTest scripting mode for coverage, valgrind\
and dashboard capability.")
(options, args) = parser.parse_args()
# Default compiler paths
FC='gfortran'
CC='gcc'
MPI_DIR='/opt/mpich/3.2-gnu'
HDF5_DIR='/opt/hdf5/1.8.16-gnu'
PHDF5_DIR='/opt/phdf5/1.8.16-gnu'
# Script mode for extra capability
script_mode = False
# Override default compiler paths if environmental vars are found
if 'FC' in os.environ:
FC = os.environ['FC']
if 'CC' in os.environ:
CC = os.environ['CC']
if 'MPI_DIR' in os.environ:
MPI_DIR = os.environ['MPI_DIR']
if 'HDF5_DIR' in os.environ:
HDF5_DIR = os.environ['HDF5_DIR']
if 'PHDF5_DIR' in os.environ:
PHDF5_DIR = os.environ['PHDF5_DIR']
# CTest script template
ctest_str = """set (CTEST_SOURCE_DIRECTORY "{source_dir}")
set (CTEST_BINARY_DIRECTORY "{build_dir}")
set(CTEST_SITE "{host_name}")
set (CTEST_BUILD_NAME "{build_name}")
set (CTEST_CMAKE_GENERATOR "Unix Makefiles")
set (CTEST_BUILD_OPTIONS "{build_opts}")
set(CTEST_UPDATE_COMMAND "git")
set(CTEST_CONFIGURE_COMMAND "${{CMAKE_COMMAND}} -H${{CTEST_SOURCE_DIRECTORY}} -B${{CTEST_BINARY_DIRECTORY}} ${{CTEST_BUILD_OPTIONS}}")
set(CTEST_MEMORYCHECK_COMMAND "{valgrind_cmd}")
set(CTEST_MEMORYCHECK_COMMAND_OPTIONS "--tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes")
#set(CTEST_MEMORYCHECK_SUPPRESSIONS_FILE ${{CTEST_SOURCE_DIRECTORY}}/../tests/valgrind.supp)
set(MEM_CHECK {mem_check})
if(MEM_CHECK)
set(ENV{{MEM_CHECK}} ${{MEM_CHECK}})
endif()
set(CTEST_COVERAGE_COMMAND "gcov")
set(COVERAGE {coverage})
set(ENV{{COVERAGE}} ${{COVERAGE}})
{subproject}
ctest_start("{dashboard}")
ctest_configure(RETURN_VALUE res)
{update}
ctest_build(RETURN_VALUE res)
if(NOT MEM_CHECK)
ctest_test({tests} PARALLEL_LEVEL {n_procs} RETURN_VALUE res)
endif()
if(MEM_CHECK)
ctest_memcheck({tests} RETURN_VALUE res)
endif(MEM_CHECK)
if(COVERAGE)
ctest_coverage(RETURN_VALUE res)
endif(COVERAGE)
{submit}
if (res EQUAL 0)
else()
message(FATAL_ERROR "")
endif()
"""
# Define test data structure
tests = OrderedDict()
def cleanup(path):
"""Remove generated output files."""
for dirpath, dirnames, filenames in os.walk(path):
for fname in filenames:
for ext in ['.h5', '.ppm', '.voxel']:
if fname.endswith(ext) and fname != '1d_mgxs.h5':
os.remove(os.path.join(dirpath, fname))
def which(program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
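# Illustrative usage of which() (not part of the original script): it mirrors
# the shell `which` command, returning the full path of an executable found on
# PATH, or None, e.g.
#
#     which('gfortran')      # -> '/usr/bin/gfortran' on a typical Linux box
#     which('no-such-tool')  # -> None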
class Test(object):
def __init__(self, name, debug=False, optimize=False, mpi=False, openmp=False,
phdf5=False, valgrind=False, coverage=False):
self.name = name
self.debug = debug
self.optimize = optimize
self.mpi = mpi
self.openmp = openmp
self.phdf5 = phdf5
self.valgrind = valgrind
self.coverage = coverage
self.success = True
self.msg = None
self.skipped = False
self.cmake = ['cmake', '-H..', '-Bbuild',
'-DPYTHON_EXECUTABLE=' + sys.executable]
# Check for MPI
if self.mpi:
if os.path.exists(os.path.join(MPI_DIR, 'bin', 'mpifort')):
self.fc = os.path.join(MPI_DIR, 'bin', 'mpifort')
else:
self.fc = os.path.join(MPI_DIR, 'bin', 'mpif90')
self.cc = os.path.join(MPI_DIR, 'bin', 'mpicc')
else:
self.fc = FC
self.cc = CC
# Sets the build name that will show up on the CDash
def get_build_name(self):
self.build_name = options.project + '_' + self.name
return self.build_name
# Sets up build options for various tests. It is used both
# in script and non-script modes
def get_build_opts(self):
build_str = ""
if self.debug:
build_str += "-Ddebug=ON "
if self.optimize:
build_str += "-Doptimize=ON "
if self.openmp:
build_str += "-Dopenmp=ON "
if self.coverage:
build_str += "-Dcoverage=ON "
self.build_opts = build_str
return self.build_opts
# Write out the ctest script to tests directory
def create_ctest_script(self, ctest_vars):
with open('ctestscript.run', 'w') as fh:
fh.write(ctest_str.format(**ctest_vars))
# Runs the ctest script which performs all the cmake/ctest/cdash
def run_ctest_script(self):
os.environ['FC'] = self.fc
os.environ['CC'] = self.cc
if self.mpi:
os.environ['MPI_DIR'] = MPI_DIR
if self.phdf5:
os.environ['HDF5_ROOT'] = PHDF5_DIR
else:
os.environ['HDF5_ROOT'] = HDF5_DIR
rc = call(['ctest', '-S', 'ctestscript.run','-V'])
if rc != 0:
self.success = False
self.msg = 'Failed on ctest script.'
# Runs cmake when in non-script mode
def run_cmake(self):
os.environ['FC'] = self.fc
os.environ['CC'] = self.cc
if self.mpi:
os.environ['MPI_DIR'] = MPI_DIR
if self.phdf5:
os.environ['HDF5_ROOT'] = PHDF5_DIR
else:
os.environ['HDF5_ROOT'] = HDF5_DIR
build_opts = self.build_opts.split()
self.cmake += build_opts
rc = call(self.cmake)
if rc != 0:
self.success = False
self.msg = 'Failed on cmake.'
# Runs make when in non-script mode
def run_make(self):
if not self.success:
return
# Default make string
make_list = ['make','-s']
# Check for parallel
if options.n_procs is not None:
make_list.append('-j')
make_list.append(options.n_procs)
# Run make
rc = call(make_list)
if rc != 0:
self.success = False
self.msg = 'Failed on make.'
# Runs ctest when in non-script mode
def run_ctests(self):
if not self.success:
return
# Default ctest string
ctest_list = ['ctest']
# Check for parallel
if options.n_procs is not None:
ctest_list.append('-j')
ctest_list.append(options.n_procs)
# Check for subset of tests
if options.regex_tests is not None:
ctest_list.append('-R')
ctest_list.append(options.regex_tests)
# Run ctests
rc = call(ctest_list)
if rc != 0:
self.success = False
self.msg = 'Failed on testing.'
# Simple function to add a test to the global tests dictionary
def add_test(name, debug=False, optimize=False, mpi=False, openmp=False,\
phdf5=False, valgrind=False, coverage=False):
tests.update({name: Test(name, debug, optimize, mpi, openmp, phdf5,
valgrind, coverage)})
# List of all tests that may be run. User can add -C to command line to specify
# a subset of these configurations
add_test('hdf5-normal')
add_test('hdf5-debug', debug=True)
add_test('hdf5-optimize', optimize=True)
add_test('omp-hdf5-normal', openmp=True)
add_test('omp-hdf5-debug', openmp=True, debug=True)
add_test('omp-hdf5-optimize', openmp=True, optimize=True)
add_test('mpi-hdf5-normal', mpi=True)
add_test('mpi-hdf5-debug', mpi=True, debug=True)
add_test('mpi-hdf5-optimize', mpi=True, optimize=True)
add_test('phdf5-normal', mpi=True, phdf5=True)
add_test('phdf5-debug', mpi=True, phdf5=True, debug=True)
add_test('phdf5-optimize', mpi=True, phdf5=True, optimize=True)
add_test('phdf5-omp-normal', mpi=True, phdf5=True, openmp=True)
add_test('phdf5-omp-debug', mpi=True, phdf5=True, openmp=True, debug=True)
add_test('phdf5-omp-optimize', mpi=True, phdf5=True, openmp=True, optimize=True)
add_test('hdf5-debug_valgrind', debug=True, valgrind=True)
add_test('hdf5-debug_coverage', debug=True, coverage=True)
# Check to see if we should just print build configuration information to user
if options.list_build_configs:
for key in tests:
print('Configuration Name: {0}'.format(key))
print(' Debug Flags:..........{0}'.format(tests[key].debug))
print(' Optimization Flags:...{0}'.format(tests[key].optimize))
print(' MPI Active:...........{0}'.format(tests[key].mpi))
print(' OpenMP Active:........{0}'.format(tests[key].openmp))
print(' Valgrind Test:........{0}'.format(tests[key].valgrind))
print(' Coverage Test:........{0}\n'.format(tests[key].coverage))
exit()
# Delete items of dictionary that don't match regular expression
if options.build_config is not None:
to_delete = []
for key in tests:
if not re.search(options.build_config, key):
to_delete.append(key)
for key in to_delete:
del tests[key]
# Check for dashboard and determine whether to push results to server
# Note that there are only 3 basic dashboards:
# Experimental, Nightly, Continuous. On the CDash end, these can be
# reorganized into groups when a hostname, dashboard and build name
# are matched.
if options.dash is None:
dash = 'Experimental'
submit = ''
else:
dash = options.dash
submit = 'ctest_submit()'
# Check for update command, which will run git fetch/merge and will delete
# any changes to repo that were not pushed to remote origin
if options.update:
update = 'ctest_update()'
else:
update = ''
# Check for CTest script mode
# Sets up whether we should use just the basic ctest command or use
# CTest scripting to perform tests.
if not options.dash is None or options.script:
script_mode = True
else:
script_mode = False
# Setup CTest script vars. Not used in non-script mode
pwd = os.getcwd()
ctest_vars = {
'source_dir': os.path.join(pwd, os.pardir),
'build_dir': os.path.join(pwd, 'build'),
'host_name': socket.gethostname(),
'dashboard': dash,
'submit': submit,
'update': update,
'n_procs': options.n_procs
}
# Check project name
subprop = """set_property(GLOBAL PROPERTY SubProject {0})"""
if options.project == "" :
ctest_vars.update({'subproject':''})
elif options.project == 'develop':
ctest_vars.update({'subproject':''})
else:
ctest_vars.update({'subproject':subprop.format(options.project)})
# Set up default valgrind tests (subset of all tests)
# Currently takes too long to run all the tests with valgrind
# Only used in script mode
valgrind_default_tests = "cmfd_feed|confidence_intervals|\
density|eigenvalue_genperbatch|energy_grid|entropy|\
lattice_multiple|output|plotreflective_plane|\
rotation|salphabetascore_absorption|seed|source_energy_mono|\
sourcepoint_batch|statepoint_interval|survival_biasing|\
tally_assumesep|translation|uniform_fs|universe|void"
# Delete items of dictionary if valgrind or coverage and not in script mode
to_delete = []
if not script_mode:
for key in tests:
if re.search('valgrind|coverage', key):
to_delete.append(key)
for key in to_delete:
del tests[key]
# Check if tests empty
if len(list(tests.keys())) == 0:
print('No tests to run.')
exit()
# Begin testing
shutil.rmtree('build', ignore_errors=True)
cleanup('.')
for key in iter(tests):
test = tests[key]
# Extra display if not in script mode
if not script_mode:
print('-'*(len(key) + 6))
print(key + ' tests')
print('-'*(len(key) + 6))
sys.stdout.flush()
# Verify fortran compiler exists
if which(test.fc) is None:
        test.msg = 'Compiler not found: {0}'.format(test.fc)
        test.success = False
continue
# Verify valgrind command exists
if test.valgrind:
valgrind_cmd = which('valgrind')
if valgrind_cmd is None:
            test.msg = 'No valgrind executable found.'
            test.success = False
continue
else:
valgrind_cmd = ''
# Verify gcov/lcov exist
if test.coverage:
if which('gcov') is None:
            test.msg = 'No gcov executable found.'
            test.success = False
continue
# Set test specific CTest script vars. Not used in non-script mode
ctest_vars.update({'build_name': test.get_build_name()})
ctest_vars.update({'build_opts': test.get_build_opts()})
ctest_vars.update({'mem_check': test.valgrind})
ctest_vars.update({'coverage': test.coverage})
ctest_vars.update({'valgrind_cmd': valgrind_cmd})
# Check for user custom tests
# INCLUDE is a CTest command that allows for a subset
# of tests to be executed. Only used in script mode.
if options.regex_tests is None:
ctest_vars.update({'tests' : ''})
# No user tests, use default valgrind tests
if test.valgrind:
ctest_vars.update({'tests' : 'INCLUDE {0}'.
format(valgrind_default_tests)})
else:
ctest_vars.update({'tests' : 'INCLUDE {0}'.
format(options.regex_tests)})
# Main part of code that does the ctest execution.
# It is broken up by two modes, script and non-script
if script_mode:
# Create ctest script
test.create_ctest_script(ctest_vars)
# Run test
test.run_ctest_script()
else:
# Run CMAKE to configure build
test.run_cmake()
# Go into build directory
os.chdir('build')
# Build OpenMC
test.run_make()
# Run tests
test.run_ctests()
# Leave build directory
os.chdir(os.pardir)
# Copy over log file
if script_mode:
logfile = glob.glob('build/Testing/Temporary/LastTest_*.log')
else:
logfile = glob.glob('build/Testing/Temporary/LastTest.log')
if len(logfile) > 0:
logfilename = os.path.split(logfile[0])[1]
logfilename = os.path.splitext(logfilename)[0]
logfilename = logfilename + '_{0}.log'.format(test.name)
shutil.copy(logfile[0], logfilename)
# For coverage builds, use lcov to generate HTML output
if test.coverage:
if which('lcov') is None or which('genhtml') is None:
print('No lcov/genhtml command found. '
'Could not generate coverage report.')
else:
shutil.rmtree('coverage', ignore_errors=True)
call(['lcov', '--directory', '.', '--capture',
'--output-file', 'coverage.info'])
call(['genhtml', '--output-directory', 'coverage', 'coverage.info'])
os.remove('coverage.info')
if test.valgrind:
# Copy memcheck output to memcheck directory
shutil.rmtree('memcheck', ignore_errors=True)
os.mkdir('memcheck')
memcheck_out = glob.glob('build/Testing/Temporary/MemoryChecker.*.log')
for fname in memcheck_out:
shutil.copy(fname, 'memcheck/')
# Remove generated XML files
xml_files = check_output(['git', 'ls-files', '.', '--exclude-standard',
'--others']).split()
for f in xml_files:
os.remove(f)
# Clear build directory and remove binary and hdf5 files
shutil.rmtree('build', ignore_errors=True)
if script_mode:
os.remove('ctestscript.run')
cleanup('.')
# Print out summary of results
print('\n' + '='*54)
print('Summary of Compilation Option Testing:\n')
if sys.stdout.isatty():
OK = '\033[92m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
else:
OK = ''
FAIL = ''
ENDC = ''
BOLD = ''
return_code = 0
for test in tests:
print(test + '.'*(50 - len(test)), end='')
if tests[test].success:
print(BOLD + OK + '[OK]' + ENDC)
else:
print(BOLD + FAIL + '[FAILED]' + ENDC)
print(' '*len(test)+tests[test].msg)
return_code = 1
sys.exit(return_code)
|
mit
| -5,832,388,550,918,468,000 | 31.965009 | 134 | 0.611955 | false | 3.545256 | true | false | false |
tsotetsi/textily-web
|
temba/middleware.py
|
1
|
5471
|
from __future__ import absolute_import, unicode_literals
import pstats
import traceback
import copy
from cStringIO import StringIO
from django.conf import settings
from django.db import transaction
from django.utils import timezone, translation
from temba.orgs.models import Org
from temba.contacts.models import Contact
try:
import cProfile as profile
except ImportError: # pragma: no cover
import profile
class ExceptionMiddleware(object):
def process_exception(self, request, exception):
if settings.DEBUG:
            traceback.print_exc()
return None
class BrandingMiddleware(object):
@classmethod
def get_branding_for_host(cls, host):
# ignore subdomains
if len(host.split('.')) > 2: # pragma: needs cover
host = '.'.join(host.split('.')[-2:])
# prune off the port
if ':' in host:
host = host[0:host.rindex(':')]
# our default branding
branding = settings.BRANDING.get(settings.DEFAULT_BRAND)
branding['host'] = settings.DEFAULT_BRAND
# override with site specific branding if we have that
site_branding = settings.BRANDING.get(host, None)
if site_branding:
branding = copy.deepcopy(branding)
branding.update(site_branding)
branding['host'] = host
return branding
def process_request(self, request):
"""
Check for any branding options based on the current host
"""
host = 'localhost'
try:
host = request.get_host()
except Exception: # pragma: needs cover
traceback.print_exc()
request.branding = BrandingMiddleware.get_branding_for_host(host)
class ActivateLanguageMiddleware(object):
def process_request(self, request):
user = request.user
language = request.branding.get('language', settings.DEFAULT_LANGUAGE)
if user.is_anonymous() or user.is_superuser:
translation.activate(language)
else:
user_settings = user.get_settings()
translation.activate(user_settings.language)
class OrgTimezoneMiddleware(object):
def process_request(self, request):
user = request.user
org = None
if not user.is_anonymous():
org_id = request.session.get('org_id', None)
if org_id:
org = Org.objects.filter(is_active=True, pk=org_id).first()
# only set the org if they are still a user or an admin
if org and (user.is_superuser or user.is_staff or user in org.get_org_users()):
user.set_org(org)
# otherwise, show them what orgs are available
else:
user_orgs = user.org_admins.all() | user.org_editors.all() | user.org_viewers.all() | user.org_surveyors.all()
user_orgs = user_orgs.distinct('pk')
if user_orgs.count() == 1:
user.set_org(user_orgs[0])
org = request.user.get_org()
if org:
timezone.activate(org.timezone)
else:
timezone.activate(settings.USER_TIME_ZONE)
return None
class FlowSimulationMiddleware(object):
def process_request(self, request):
Contact.set_simulation(False)
return None
class ProfilerMiddleware(object): # pragma: no cover
"""
Simple profile middleware to profile django views. To run it, add ?prof to
the URL like this:
http://localhost:8000/view/?prof
Optionally pass the following to modify the output:
?sort => Sort the output by a given metric. Default is time.
See http://docs.python.org/2/library/profile.html#pstats.Stats.sort_stats
for all sort options.
?count => The number of rows to display. Default is 100.
This is adapted from an example found here:
http://www.slideshare.net/zeeg/django-con-high-performance-django-presentation.
"""
def can(self, request):
return settings.DEBUG and 'prof' in request.GET
def process_view(self, request, callback, callback_args, callback_kwargs):
if self.can(request):
self.profiler = profile.Profile()
args = (request,) + callback_args
return self.profiler.runcall(callback, *args, **callback_kwargs)
def process_response(self, request, response):
if self.can(request):
self.profiler.create_stats()
io = StringIO()
stats = pstats.Stats(self.profiler, stream=io)
stats.strip_dirs().sort_stats(request.GET.get('sort', 'time'))
stats.print_stats(int(request.GET.get('count', 100)))
response.content = '<pre>%s</pre>' % io.getvalue()
return response
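# Illustrative only (assumed dev settings, not part of this module): with
# old-style Django middleware such as the classes in this file, the profiler
# would typically be enabled by listing it in MIDDLEWARE_CLASSES, e.g.
#
#     MIDDLEWARE_CLASSES += ('temba.middleware.ProfilerMiddleware',)
#
# and then requesting any view with ?prof (optionally &sort=cumulative&count=50)
# appended to the URL, as described in the docstring above.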
class NonAtomicGetsMiddleware(object):
"""
Django's non_atomic_requests decorator gives us no way of enabling/disabling transactions depending on the request
type. This middleware will make the current request non-atomic if an _non_atomic_gets attribute is set on the view
function, and if the request method is GET.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if getattr(view_func, '_non_atomic_gets', False):
if request.method.lower() == 'get':
transaction.non_atomic_requests(view_func)
else:
view_func._non_atomic_requests = set()
return None
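# Illustrative sketch (not part of this module): a view opts in to non-atomic
# GETs by carrying the _non_atomic_gets attribute that NonAtomicGetsMiddleware
# looks for, e.g. via a small marker decorator:
#
#     def non_atomic_gets(view_func):
#         view_func._non_atomic_gets = True
#         return view_func
#
#     @non_atomic_gets
#     def my_view(request):
#         ...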
|
agpl-3.0
| 4,714,688,521,143,626,000 | 31.182353 | 126 | 0.628039 | false | 4.138427 | false | false | false |
FlintHill/SUAS-Competition
|
SUASSystem/SUASSystem/image_processing.py
|
1
|
3266
|
from time import sleep
from PIL import Image
import os
import math
import random
from .utils import *
from UpdatedImageProcessing import *
from .settings import GCSSettings
from .converter_functions import inverse_haversine, get_mission_json
from .location import Location
"""
This file contains our image processing logic and utilizes our cropper function.
"""
def run_img_proc_process(logger_queue, location_log, targets_to_submit, interop_client_array):
while True:
if len(targets_to_submit) > 0:
target_characteristics = targets_to_submit.pop(0)
target_time = get_image_timestamp_from_metadata("static/imgs/" + target_characteristics["base_image_filename"])
closest_time_index = 0
            least_time_difference = abs(target_time - location_log[0]["epoch_time"])
            for index in range(len(location_log)):
                difference_in_times = abs(target_time - location_log[index]["epoch_time"])
                if difference_in_times <= least_time_difference:
                    closest_time_index = index
                    least_time_difference = difference_in_times
drone_gps_location = Location(location_log[closest_time_index]["latitude"], location_log[closest_time_index]["longitude"], location_log[closest_time_index]["altitude"])
image = Image.open("static/imgs/" + target_characteristics["base_image_filename"])
image_midpoint = (image.width / 2, image.height / 2)
target_midpoint = ((target_characteristics["target_top_left"][0] + target_characteristics["target_bottom_right"][0]) / 2, (target_characteristics["target_top_left"][1] + target_characteristics["target_bottom_right"][1]) / 2)
target_location = get_target_gps_location(image_midpoint, target_midpoint, drone_gps_location)
target_characteristics["latitude"] = target_location.get_lat()
target_characteristics["longitude"] = target_location.get_lon()
original_image_path = "static/all_imgs/" + target_characteristics["base_image_filename"]
cropped_target_path = "static/crops/" + str(len(os.listdir('static/crops'))) + ".jpg"
cropped_target_data_path = "static/crops/" + str(len(os.listdir('static/crops'))) + ".json"
crop_target(original_image_path, cropped_target_path, target_characteristics["target_top_left"], target_characteristics["target_bottom_right"])
save_json_data(cropped_target_data_path, target_characteristics)
# comment out these lines if testing w/o interop
if target_characteristics["type"] == "standard":
interop_client_array[0].post_manual_standard_target(target_characteristics, cropped_target_path)
elif target_characteristics["type"] == "emergent":
interop_client_array[0].post_manual_emergent_target(target_characteristics, cropped_target_path)
sleep(0.1)
def run_autonomous_img_proc_process(logger_queue, interop_client_array, img_proc_status, autonomous_targets_to_submit):
while True:
if len(autonomous_targets_to_submit) > 0:
target_info = autonomous_targets_to_submit.pop()
interop_client_array[0].post_autonomous_target(target_info)
sleep(0.5)
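# Illustrative helper (not part of the original module): the timestamp matching
# performed inline in run_img_proc_process can be expressed as a standalone
# function, which makes the "closest location log entry" lookup easy to test.
def closest_location_index(location_log, target_time):
    """Return the index of the log entry whose epoch_time is nearest to target_time."""
    return min(range(len(location_log)),
               key=lambda index: abs(target_time - location_log[index]["epoch_time"]))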
|
mit
| -1,299,459,855,585,969,200 | 56.298246 | 236 | 0.672994 | false | 3.745413 | false | false | false |
itoledoc/gWTO2
|
arrayResolution2p.py
|
1
|
23318
|
#!/usr/bin/python
"""
Script to return the Cycle 2 arrays for a given LAS, Angular Resolution
HISTORY:
2012.10.11:
- first shot
- Cycle 1 setup
2012.11.20:
- Adding resolution, LAS for different declination
2012.12.27:
- adding a filter if no configuration is found to multiply by a fudge factor
2013.03.04:
- Adding geometric average for the spatial resolution and test on twice the spatial resolution
2013.03.05:
- fixing bugs
- adding extra information (THB, array)
2013.03.11:
- Removing the condition about AR/2. for the array.
2013.03.18:
- Adding point source option
- putting the AR/2. if it is not a point source
2013.03.20:
- changing the PS option to LAS = 0
2013.05.02:
- changing slightly the conditions of acceptance (>= instead of >)
2013.05.03:
- print version
- try increasing fudge factor until it gets a solution
2013.05.10:
- Add silentRun for P2G (FG)
2013.12.13:
- Name change for Cycle2 and update of the pickle.
- Update of the finder (multi-configuration)
2013.12.16:
- new find_array3 for the multi-configuration
2014.01.28:
- fix the fudge relaxation for the resolution.
2014.05.22:
- New algorithm to deal with minAr, maxAr
2014.05.22:
- ugly fixConfiguration when OT forces for two configurations
2014.06.02
- fixing the matchAR
- relaxing the condition to allow a better AR with a sufficient LAS (0.9)
2014.06.05:
- adapting the case 7-m+12-m
2014.06.13:
- fix an edge problem when LAS = 0
RUN:
Input RES (arcsec) LAS (arcsec) FREQ (GHz) Y/N (7-m ACA) number of 12-m configurations
> python arrayResolution2p.py 0.2 2.5 640. Y 1    ## LAS = 0. is treated as a point source
"""
__author__ = "ALMA : SL, AL, FG"
__version__ = "[email protected]"
import sys, pickle, os
import math
### ALMA
LATITUDE_ALMA = -23.03
DEG2RAD = math.pi/180.
class arrayRes:
def __init__(self, arguments):
self.LAS = [26.1,26.3,18.0,18.0,14.4,9.1,9.1]
self.LASA = [44.0,44.0,44.0,44.0,14.4,9.1,9.1]
self.LAST = [390.0,390.0,390.0,390.0,14.4,9.,9.1]
self.res = [3.73,2.04,1.40,1.11,0.75,0.57,0.41]
self.frequencyReference = 100.
self.lasObs = 0.
self.resObs = 0.
self.resObsOriginal = 0
self.lasOriginal = 0
self.freqObs = 0.
self.pointSource = False
self.silent = True
self.nof12m = 1
self.minAR = [0.,10000.]
self.maxAR = [0, 0.]
self.args = arguments
self.array = {0:"C34-1",1:"C34-2",2:"C34-3",3:"C34-4",4:"C34-5",5:"C34-6",6:"C34-7"}
self.read_cycle2()
def set_las(self,las):
"Set the LAS of the observation"
self.lasObs = las
self.lasOriginal = las
def set_res(self,res):
"Set the angular resolution of the observation"
self.resObs = res
self.resOriginal = res
def set_frequency(self,freq):
"Set the frequency of the observation"
# if ((freq>=64.)&(freq<=116.)): freq = 100.
# if ((freq>=211.)&(freq<=275.)): freq = 230.
# if ((freq>=275.)&(freq<=373.)): freq = 345.
# if ((freq>=602.)&(freq<=720.)): freq = 675.
self.freqObs = freq
def set_declination(self,declination):
"Set the representative declination of the observation"
self.declination = declination
def set_aca(self,aca):
"Set the frequency of the observation"
self.acaUse = aca
def set_12m(self,numberof12m):
"Set the number of 12m array configuration"
self.nof12m = numberof12m
def set_pointSource(self, isPS):
"Set True if point source"
self.pointSource = isPS
def read_cycle2(self, directory=None):
directory = os.environ['WTO'] + 'conf/'
self.data = []
f = open(directory+'Resolution-C34-1.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-2.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-3.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-4.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-5.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-6.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-7.pickle')
self.data.append(pickle.load(f))
f.close()
### ACA ####
f = open(directory+'Resolution-ACA-std.pickle')
self.aca = pickle.load(f)
f.close()
def find_array(self):
"Find the array with the obs. input"
TP='N'
arrayMatch = []
scalingFrequency = self.frequencyReference / self.freqObs
if (self.acaUse == 'Y'):
self.LAS=self.LAST
if self.lasObs / scalingFrequency > self.LASA[1]:
TP='Y'
for arr in self.array :
if self.silent:
print self.LAS[arr] * scalingFrequency, self.res[arr] * scalingFrequency
if self.LAS[arr] * scalingFrequency >= self.lasObs and self.res[arr] * scalingFrequency <= self.resObs:
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
return arrayMatch,TP
def find_array2(self,verbose = False):
"Find the array with the obs. input using the precise resolution, LAS..."
TP = 'N'
scalingFrequency = self.frequencyReference / self.freqObs
nData = len(self.data[0][0])
decMin = self.data[0][0][0]
decMax = self.data[0][0][nData-1]
deltaDec = (decMax-decMin)/nData
index = int(math.floor(((self.declination-decMin) / deltaDec)))
# print index
### No ACA
arrayMatch = []
for arr in self.array :
lasArr = self.data[arr][3][index]
resArr = math.sqrt(self.data[arr][1][index] * self.data[arr][2][index])
lasFreqArr = lasArr * scalingFrequency
spatialResolutionArr = resArr * scalingFrequency
res_thb = self.res[arr]*scalingFrequency
las_thb = self.LAS[arr]*scalingFrequency
elevation_factor = abs(1./math.sin(DEG2RAD*(90.-LATITUDE_ALMA+self.declination)))
res_estimated = math.sqrt(res_thb*res_thb*elevation_factor)
las_estimated = math.sqrt(las_thb*las_thb*elevation_factor)
if self.silent:
if(verbose):
print("# Array: %s, LAS: %5.2f, RES: %5.2f"%(self.array[arr],lasFreqArr, spatialResolutionArr ))
print("# THB: LAS: %5.2f, RES: %5.2f")%(las_estimated,res_estimated)
# print("#")
if self.pointSource:
if lasFreqArr >= self.lasObs and self.resObs >= spatialResolutionArr :
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
else :
if lasFreqArr >= self.lasObs and self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. :
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
### ACA used
if (self.acaUse == 'Y'):
arrayMatch = []
for arr in self.array:
resArr = math.sqrt(self.data[arr][1][index] * self.data[arr][2][index])
spatialResolutionArr = resArr*scalingFrequency
##
if self.pointSource:
if self.resObs > spatialResolutionArr and arr < 4:
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
else :
if self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. and arr < 4:
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
lasACA = self.aca[3][index]
if lasACA*scalingFrequency <= self.lasObs:
TP = 'Y'
return arrayMatch, TP
def find_array3(self,verbose = False):
"Find the array with the obs. input using the precise resolution, LAS.... It takes into account a multi-configuration"
TP = 'N'
scalingFrequency = self.frequencyReference / self.freqObs
nData = len(self.data[0][0])
decMin = self.data[0][0][0]
decMax = self.data[0][0][nData-1]
deltaDec = (decMax-decMin)/nData
index = int(math.floor(((self.declination-decMin) / deltaDec)))
# Cycle 2 Match Array
matchArrayCycle2 = {3:0,4:1,5:2,6:2}
###
arrayMatchRes = []
arrayMatchLAS = []
lasFreqArrAll = []
resFreqArrAll = []
for arr in self.array :
arrayMatchRes.append("")
arrayMatchLAS.append("")
lasArr = self.data[arr][3][index]
resArr = math.sqrt(self.data[arr][1][index] * self.data[arr][2][index])
lasFreqArr = lasArr * scalingFrequency
spatialResolutionArr = resArr * scalingFrequency
lasFreqArrAll.append(lasFreqArr)
resFreqArrAll.append(spatialResolutionArr)
res_thb = self.res[arr]*scalingFrequency
las_thb = self.LAS[arr]*scalingFrequency
elevation_factor = abs(1./ math.sin(DEG2RAD*(90.-LATITUDE_ALMA+self.declination)))
res_estimated = math.sqrt(res_thb*res_thb*elevation_factor)
las_estimated = math.sqrt(las_thb*las_thb*elevation_factor)
if self.silent:
if(verbose):
print("# Array: %s, LAS: %5.2f, RES: %5.2f"%(self.array[arr],lasFreqArr, spatialResolutionArr ))
print("# THB: LAS: %5.2f, RES: %5.2f")%(las_estimated,res_estimated)
# print("#")
########################### Comparison #######################
notFound = True
notFoundLAS = True
for arr in self.array :
lasFreqArr = lasFreqArrAll[arr]
spatialResolutionArr = resFreqArrAll[arr]
if self.pointSource:
if self.resObs >= spatialResolutionArr :
arrayMatchRes[arr] = self.array[arr]
notFound = False
else :
if self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. :
arrayMatchRes[arr] = self.array[arr]
notFound = False
if lasFreqArr <= self.lasObs and arr > 2:
arrayMatchLAS[matchArrayCycle2[arr]] = self.array[matchArrayCycle2[arr]]
if lasFreqArrAll[matchArrayCycle2[arr]] <= self.lasObs and matchArrayCycle2[arr] > 0:
for i in range(0,matchArrayCycle2[arr]):
if lasFreqArrAll[i] >= self.lasObs :
arrayMatchLAS[i] = self.array[i]
notFoundLAS = False
### ACA used ###############
if (self.acaUse == 'Y'):
arrayMatchRes = []
arrayMatchLAS = []
for arr in self.array :
arrayMatchRes.append("")
arrayMatchLAS.append("")
for arr in self.array:
spatialResolutionArr = resFreqArrAll[arr]
if self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. :
arrayMatchRes[arr] = self.array[arr]
notFound = False
if arr > 2:
arrayMatchLAS[matchArrayCycle2[arr]] = self.array[matchArrayCycle2[arr]]
lasACA = self.aca[3][index]
if lasACA*scalingFrequency <= self.lasObs:
TP = 'Y'
return [arrayMatchRes,arrayMatchLAS] , TP , notFound, notFoundLAS
def matchAR(self,resLas):
"Match the spatial resolution for the number of configurations"
scalingFrequency = self.frequencyReference / self.freqObs
nData = len(self.data[0][0])
decMin = self.data[0][0][0]
decMax = self.data[0][0][nData-1]
deltaDec = (decMax-decMin)/nData
zenith = int(math.floor(((-23.0-decMin) / deltaDec)))
## check if the resolution is lower than the most compact one
##
b0_12compact = self.data[0][1][zenith] * scalingFrequency
b1_12compact = self.data[0][2][zenith] * scalingFrequency
resCompact = math.sqrt(b0_12compact*b1_12compact)
maxArrayCycle2 = 6
b0_12ext = self.data[maxArrayCycle2][1][zenith] * scalingFrequency
b1_12ext = self.data[maxArrayCycle2][2][zenith] * scalingFrequency
resExt = math.sqrt(b0_12ext*b1_12ext)
########
#print resCompact
#print resExt
if self.nof12m == 1:
self.maxAR[0] = self.resOriginal * 1.1
self.minAR[0] = self.resOriginal * 0.7
## We relax the condition to get at least 0.9 LAS
for arr in self.array:
lasArr = self.data[arr][3][zenith] *scalingFrequency
b0 = self.data[arr][1][zenith] * scalingFrequency
b1 = self.data[arr][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1)
if lasArr > 0.9 * self.lasObs and res < self.minAR[0] :
# print res
self.minAR[0] = res
if self.resOriginal > resCompact and self.lasObs != 0.:
self.minAR[0] = resCompact * 0.8
self.maxAR[0] = self.resOriginal * 1.1
if self.resOriginal < resExt:
self.minAR[0] = self.resOriginal
self.maxAR[0] = resExt * 1.1
if self.nof12m == 2:
## estimate the array 1
self.minAR[0] = self.resOriginal * 0.7
self.maxAR[0] = self.resOriginal * 1.1
minArr = 1000
maxArr = 0
for s in resLas:
for arr in self.array:
if s == self.array[arr]:
if arr < minArr:
minArr = arr
if arr > maxArr:
maxArr = arr
b0 = self.data[arr][1][zenith] * scalingFrequency
b1 = self.data[arr][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1)
if res > self.maxAR[1]:
self.maxAR[1] = res
if res < self.minAR[1]:
self.minAR[1] = res
if minArr > 0:
b0 = self.data[minArr-1][1][zenith] * scalingFrequency
b1 = self.data[minArr-1][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1) * 0.9
self.maxAR[1] = res
if self.maxAR[1] == self.minAR[1]:
b0 = self.data[maxArr+1][1][zenith] * scalingFrequency
b1 = self.data[maxArr+1][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1) * 1.3
self.minAR[1] = res
## check on the highest spatial resolution
if self.resOriginal < resExt:
self.minAR[0] = self.resOriginal * 0.7
self.maxAR[0] = resExt * 1.1
def fixConfiguration(self,result,nof12m):
" Fix the configurations"
lasC = []
extC = []
ext = 0
las = 0
for s in result[0]:
if s != '':
ext += 1
extC.append(s)
for s in result[1]:
if s != '':
las += 1
lasC.append(s)
if nof12m == 2 and las == 0 :
if extC[-1] == 'C34-7' :
resN = ['C34-6']
elif extC[-1] == 'C34-6':
resN = ['C34-5']
elif extC[-1] == 'C34-5' :
resN = ['C34-4']
elif extC[-1] == 'C34-4' :
resN = ['C34-3']
elif extC[-1] == 'C34-3' :
resN = ['C34-2']
elif extC[-1] == 'C34-2' :
resN = ['C34-1']
result[1]= resN
return(result)
########################################################################3
def silentRun(self):
self.silent = False
def run(self):
"Run the matching array"
TP="N"
self.set_res(float(self.args[1]))
self.set_las(float(self.args[2]))
self.set_frequency(float(self.args[3]))
self.set_declination(float(-23.0))
self.set_aca((self.args[4]))
self.set_12m(int(self.args[5]))
if self.lasObs == 0.:
self.set_pointSource(True)
strOut = "### arrayResolution2p \n"
strOut += "### Version: %s \n"%(__version__)
strOut += "### Input \n"
if self.pointSource:
strOut += "# Point Source ! \n"
strOut += "# Spatial Resolution: %s \n"%(self.args[1])
strOut += "# LAS: %s \n"%(self.args[2])
strOut += "# Frequency: %s GHz \n"%(self.args[3])
# strOut += "# Declination: %s \n"%(self.args[4])
strOut += "# 7-m Array (Y/N): %s \n"%(self.args[4])
strOut += "# Num. of 12-m Array: %d \n\n"%(self.nof12m)
strOut += "### Output (target frequency) \n"
strOut += "### Using CASA simulation with natural weighting (slightly different of THB)"
if self.silent:
print(strOut)
notFound = True
maxTry = 100
nTry = 1
deltaFudgeFactor = 0.05
fudgeFactor = 1.0
res , TP , notFound , notFoundLAS = self.find_array3(verbose = True)
while (notFound and nTry < maxTry and notFoundLAS and self.acaUse == 'N' ):
nTry += 1
notFound = False
fudgeFactor += deltaFudgeFactor
self.resObs *= fudgeFactor
self.lasObs /= fudgeFactor
res , TP , notFound , notFoundLAS = self.find_array3()
while (notFound and nTry < maxTry ):
nTry += 1
notFound = False
fudgeFactor += deltaFudgeFactor
self.resObs *= fudgeFactor
res , TP , notFound , notFoundLAS = self.find_array3()
if nTry > 1 :
if self.silent:
print("# No array configuration found, fudge factor applied (Tol = %3.0f %%)"%((fudgeFactor-1.)*100.))
if notFound and nTry > 1:
if self.silent:
print("# No array configuration found, even with fudge factor, problem ...")
strOutRes = ""
strOutLAS = ""
pcomR = ""
pcomL = ""
if self.silent:
print ""
print("### Results - AR - LAS")
if notFound :
if self.silent:
print ",,,,,"
else:
for s in res[0]:
strOutRes += pcomR+s
pcomR = ","
strOutRes += ","
strOutRes += TP
for s in res[1]:
strOutLAS += pcomL+s
pcomL = ","
strOutLAS += ","
strOutLAS += TP
# if self.silent:
# print strOutLAS
# print strOutRes
# print res
resN = self.fixConfiguration(res, self.nof12m)
# print resN
self.matchAR(resN[1])
if self.nof12m == 1:
#print("One Array:")
#print("Min Resolution : %5.2f "%(self.minAR[0]))
#print("Max Resolution : %5.2f "%(self.maxAR[0]))
return self.minAR[0], self.maxAR[0], 0, 0
elif self.nof12m == 2:
#print("Extended Array:")
#print("Min Resolution minAR_e %5.2f "%(self.minAR[0]))
#print("Max Resolution maxAR_e %5.2f "%(self.maxAR[0]))
#print("Compact Array:")
#print("Min Resolution minAR_c %5.2f "%(self.minAR[1]))
#print("Max Resolution maxAR_c %5.2f "%(self.maxAR[1]))
return self.minAR[0], self.maxAR[0], self.minAR[1], self.maxAR[1]
#====== Standalone program =========================
if __name__=="__main__":
arg = sys.argv
if len(arg) < 6:
print "Arguments missing \n"
print "The correct syntax is:"
print "python arrayResolution2p.py RES (arcsec) LAS (arcsec) FREQ (GHz) Y/N (ACA) numberofarray \n"
print "Example:"
print "python arrayResolution2p.py 0.2 2.0 640. Y 1 ## if LAS = 0. assumes a point source"
else :
a = arrayRes(arg)
a.run()
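        # Illustrative only (not part of the original script): run() returns the
        # resolution bounds computed by matchAR(), e.g.
        #
        #     a = arrayRes(['', '0.2', '2.0', '640.', 'Y', '1'])
        #     min_ar_e, max_ar_e, min_ar_c, max_ar_c = a.run()
        #
        # where the last two values are only meaningful when two 12-m
        # configurations are requested.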
|
gpl-2.0
| -7,200,967,021,325,123,000 | 30.173797 | 133 | 0.46908 | false | 3.904555 | true | false | false |
Sayter99/86Scratch
|
Helpers/86Scratch/s2a_fm.py
|
1
|
4575
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 25 13:17:15 2013
@author: Alan Yorinks
Copyright (c) 2013-14 Alan Yorinks All right reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import sys
from PyMata.pymata import PyMata
import scratch_http_server
from scratch_command_handlers import ScratchCommandHandlers
import time
#noinspection PyBroadException
def s2a_fm():
"""
This is the "main" function of the program.
It will instantiate PyMata for communication with an Arduino micro-controller
and the command handlers class.
It will the start the HTTP server to communicate with Scratch 2.0
@return : This is the main loop and should never return
"""
# total number of pins on arduino board
total_pins_discovered = 0
# number of pins that are analog
number_of_analog_pins_discovered = 0
print 's2a_fm version 1.5 Copyright(C) 2013-14 Alan Yorinks All Rights Reserved '
# get the com_port from the command line or default if none given
# if user specified the com port on the command line, use that when invoking PyMata,
# else use '/dev/ttyACM0'
if len(sys.argv) == 2:
com_port = str(sys.argv[1])
else:
com_port = '/dev/ttyACM0'
try:
# instantiate PyMata
firmata = PyMata(com_port) # pragma: no cover
except Exception:
print 'Could not instantiate PyMata - is your Arduino plugged in?'
return
# determine the total number of pins and the number of analog pins for the Arduino
# get the arduino analog pin map
# it will contain an entry for all the pins with non-analog set to firmata.IGNORE
firmata.analog_mapping_query()
capability_map = firmata.get_analog_mapping_request_results()
firmata.capability_query()
print "Please wait for Total Arduino Pin Discovery to complete. This can take up to 30 additional seconds."
# count the pins
for pin in capability_map:
total_pins_discovered += 1
# non analog pins will be marked as IGNORE
if pin != firmata.IGNORE:
number_of_analog_pins_discovered += 1
# instantiate the command handler
scratch_command_handler = ScratchCommandHandlers(firmata, com_port, total_pins_discovered,
number_of_analog_pins_discovered)
# wait for a maximum of 30 seconds to retrieve the Arduino capability query
start_time = time.time()
pin_capability = firmata.get_capability_query_results()
while not pin_capability:
        if time.time() - start_time > 30:
            print ''
            print "Could not determine pin capability - exiting."
            firmata.close()
            return
# keep sending out a capability query until there is a response
pin_capability = firmata.get_capability_query_results()
time.sleep(.1)
# we've got the capability, now build a dictionary with pin as the key and a list of all the capabilities
# for the pin as the key's value
pin_list = []
total_pins_discovered = 0
for entry in pin_capability:
# bump up pin counter each time IGNORE is found
if entry == firmata.IGNORE:
scratch_command_handler.pin_map[total_pins_discovered] = pin_list
total_pins_discovered += 1
pin_list = []
else:
pin_list.append(entry)
print "Arduino Total Pin Discovery completed in %d seconds" % (int(time.time() - start_time))
try:
# start the server passing it the handle to PyMata and the command handler.
scratch_http_server.start_server(firmata, scratch_command_handler)
except Exception:
firmata.close()
return
except KeyboardInterrupt:
# give control back to the shell that started us
firmata.close()
return
if __name__ == "__main__":
s2a_fm()
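    # Illustrative only: the serial port may be given on the command line,
    # otherwise /dev/ttyACM0 is assumed, e.g.
    #
    #     python s2a_fm.py COM3           (Windows)
    #     python s2a_fm.py /dev/ttyACM0   (Linux)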
|
mit
| -6,333,871,453,249,498,000 | 34.75 | 111 | 0.676066 | false | 3.950777 | false | false | false |
maxvonhippel/q2-diversity
|
q2_diversity/_filter.py
|
1
|
1145
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import skbio
import qiime2
def filter_distance_matrix(distance_matrix: skbio.DistanceMatrix,
metadata: qiime2.Metadata,
where: str=None,
exclude_ids: bool=False) -> skbio.DistanceMatrix:
ids_to_keep = metadata.ids(where=where)
if exclude_ids:
ids_to_keep = set(distance_matrix.ids) - set(ids_to_keep)
# NOTE: there is no guaranteed ordering to output distance matrix because
# `ids_to_keep` is a set, and `DistanceMatrix.filter` uses its iteration
# order.
try:
return distance_matrix.filter(ids_to_keep, strict=False)
except skbio.stats.distance.DissimilarityMatrixError:
raise ValueError(
"All samples were filtered out of the distance matrix.")
|
bsd-3-clause
| -3,368,935,227,757,453,300 | 41.407407 | 78 | 0.571179 | false | 4.525692 | false | false | false |
lamotriz/sistemas-de-aterramento
|
src/agilent_u2531a.py
|
1
|
14700
|
# -*- coding: utf-8 -*-
# Communication with the Agilent U2531A acquisition board
#
# UFC - Universidade Federal do Ceará (Federal University of Ceará)
#
# Maintainers:
# Felipe Bandeira da Silva
# Francisco Alexander
#
from __future__ import division
import platform
#if platform.system() == 'Windows':
# import visa
#else:
# import visa_linux_emulation as visa
try:
import visa
except:
    # During a normal install driven by NSIS, the Windows PATH was not yet
    # updated with Python, so pip could not be run at install time to pull in
    # "pyvisa", which in turn has several dependencies that pip handles
    # transparently. For this reason the first run of the program requires an
    # internet connection.
    #
    # For everything to work correctly, PyVISA 1.4 is required.
#import pip
#pip.main(['install', 'pyvisa'])
import subprocess
print u"aviso: instalando o PyVISA 1.4"
subprocess.call(['pip', 'install', 'PyVISA==1.4'])
print u"aviso: instalacao finalizada"
import visa
import matplotlib.pyplot as plt
from time import sleep, time, asctime, localtime
import numpy as np
###############################################################################
# Correction constants. The same values used by the LabVIEW program.
###############################################################################
FATOR_CORRECAO_TENSAO = 100
FATOR_CORRECAO_CORRENTE = 2.71
# 0 - do not show debug messages
# 1 - show debug messages
DEBUG = 0
# A small spurious pulse (pure noise) shows up at the start of the
# acquisition. To display the signal correctly, the number of acquired
# points had to be increased, which makes each acquisition slower.
#QUANTIDADE_PONTOS = 50000
QUANTIDADE_PONTOS = 800000
###############################################################################
# testBit() returns a nonzero result, 2**offset, if the bit at 'offset' is one.
def testBit(int_type, offset):
mask = 1 << offset
return(int_type & mask)
# setBit() returns an integer with the bit at 'offset' set to 1.
def setBit(int_type, offset):
mask = 1 << offset
return(int_type | mask)
# clearBit() returns an integer with the bit at 'offset' cleared.
def clearBit(int_type, offset):
mask = ~(1 << offset)
return(int_type & mask)
# toggleBit() returns an integer with the bit at 'offset' inverted, 0 -> 1 and 1 -> 0.
def toggleBit(int_type, offset):
mask = 1 << offset
return(int_type ^ mask)
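# Illustrative usage of the helpers above (not part of the original module);
# convUNI() below relies on bit 13 as the sign/offset flag of each 14-bit sample:
#
#     testBit(0x2000, 13)   # -> 0x2000 (non-zero, so bit 13 is set)
#     clearBit(0x2000, 13)  # -> 0
#     setBit(0, 13)         # -> 0x2000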
def lerEndian(data):
"""
    Converts a raw data sequence into 2-byte values.
    The input sequence is little-endian, with bit 13 used as the carry flag.
    Input:
        data = raw string holding the block of bytes
    Output:
        t = number of decoded values
        v = list with the decoded values
"""
raw = data[10:]
valores = []
passo = 0
for i in raw:
if passo == 0:
lsb = i
passo = 1
elif passo == 1:
msb = i
passo = 0
num = ((ord(msb)<<8)+(ord(lsb)))>>2
#print hex(num)
valores.append(num)
return [len(valores), valores]
def ler2Endian(data):
"""
    Reads a block of bytes made of two simultaneous channel readings.
"""
raw = data[10:]
A = []
B = []
passo = 0
for i in raw:
if passo == 0:
lsb = i
passo = 1
elif passo == 1:
msb = i
passo = 2
A.append(((ord(msb)<<8)+(ord(lsb)))>>2)
elif passo == 2:
lsb = i
passo = 3
elif passo == 3:
msb = i
passo = 0
B.append(((ord(msb)<<8)+(ord(lsb)))>>2)
return [len(A), A, B]
def convBIP(raw, range_ad=10, resolution=14):
v = []
for i in raw:
v.append( (2*i)/(2**resolution) * range_ad )
return v
def convUNI(raw, range_ad=10, resolution=14):
v = []
for i in raw:
        # if bit 13 is 1 then the number is "negative";
        # the unipolar conversion maps
# MAX = 1FFF
# MAX/2 = 0000
# 0 = 2000
if testBit(i, 13) > 0:
valor = clearBit(i, 13) - (2**14)/2
v.append( (valor/(2**resolution) + 0.5)*range_ad )
else:
v.append( (i/(2**resolution) + 0.5)*range_ad )
return v
def lerTensaoCorrente(ag):
"""
    Reads two channels simultaneously:
    channel 101 (current) and channel 102 (voltage).
"""
    # reset the acquisition board
    ag.write("*CLS")
    ag.write("*RST")
    ag.write("ROUT:ENAB 0,(@103, 104)") # disable channels 103 and 104
    ag.write("ROUT:ENAB 1,(@101, 102)") # enable channels 101 and 102
    ag.write("ROUT:CHAN:RANG 10,(@101, 102)") # same range as the National Instruments program
    ag.write("ROUT:CHAN:POL UNIP,(@101, 102)") # unipolar
    ag.write("ACQ:SRAT 2000000") # sampling rate
    #ag.write("ACQ:POIN 2000000")
    #ag.write("ACQ:POIN 50000") # number of points to acquire
ag.write("ACQ:POIN %d" % QUANTIDADE_PONTOS)
    #####################
    # start acquisition #
    #####################
ag.write("DIG")
disparaTensao(ag)
#ag.write("DIG")
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
        sleep(0.2) # wait until the sample is ready
    # A small change in the first 555 timer's capacitor means that set and
    # reset need a longer time for both to take effect.
sleep(.2)
retiraTensao(ag)
ag.write("WAV:DATA?")
dados = ag.read()
t, I, V = ler2Endian(dados)
V = convUNI(V, 10)
I = convUNI(I, 10)
return [dados, V, I]
def lerTensao(ag):
"""
    Reads only the source voltage channel (channel 102),
    with the full set/reset trigger sequence.
"""
# reset
ag.write("*CLS")
ag.write("*RST")
    # start reading channel 102 (voltage)
ag.write("ROUT:ENAB 0,(@103, 101, 104)")
ag.write("ROUT:ENAB 1,(@102)")
ag.write("ROUT:CHAN:RANG 10,(@102)") # coloca no mesmo nivel que o programa da National
ag.write("ROUT:CHAN:POL UNIP,(@102)")
ag.write("ACQ:SRAT 2000000")
#ag.write("ACQ:POIN 2000000")
#ag.write("ACQ:POIN 50000")
    # A small spurious pulse (pure noise) shows up at the start of the
    # acquisition. To display the signal correctly, the number of acquired
    # points had to be increased, which makes each acquisition slower.
ag.write("ACQ:POIN %d" % (QUANTIDADE_PONTOS))
    # start acquisition
ag.write("DIG")
disparaTensao(ag)
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
sleep(0.5)
ag.write("WAV:DATA?")
dados = ag.read()
sleep(.2)
retiraTensao(ag)
#print dados
t, R = lerEndian(dados)
V = convUNI(R, 10)
plt.grid()
plt.plot(range(0, t), V)
plt.show()
return t, V
def lerCorrente(ag):
"""
    Reads only the source current channel (channel 101),
    with the full set/reset trigger sequence.
"""
# reset
ag.write("*CLS")
ag.write("*RST")
    # start reading channel 101 (current)
ag.write("ROUT:ENAB 0,(@103, 102, 104)")
ag.write("ROUT:ENAB 1,(@101)")
ag.write("ROUT:CHAN:RANG 10,(@101)")
ag.write("ROUT:CHAN:POL UNIP,(@101)")
ag.write("ACQ:SRAT 2000000")
ag.write("ACQ:POIN 2000000")
    # start acquisition
ag.write("DIG")
disparaTensao(ag)
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
sleep(0.5)
ag.write("WAV:DATA?")
dados = ag.read()
sleep(.2)
retiraTensao(ag)
#print dados
t, R = lerEndian(dados)
V = convUNI(R, 10)
plt.grid()
plt.plot(range(0, t), V)
plt.show()
return t, V
def lerCanal103(ag):
"""
    This channel was used for the initial tests of the analog-to-digital
    conversion and is no longer needed.
    The voltage and current reading functions are identical to this one;
    only the channel changes.
"""
# reset
ag.write("*CLS")
ag.write("*RST")
    # start reading channel 103
ag.write("ROUT:ENAB 0,(@101, 102, 104)")
ag.write("ROUT:ENAB 1,(@103)")
ag.write("ROUT:CHAN:RANG 10,(@103)")
#ag.write("ROUT:CHAN:POL BIP,(@103)")
ag.write("ROUT:CHAN:POL UNIP,(@103)")
ag.write("ACQ:SRAT 2000000")
ag.write("ACQ:POIN 2000000")
    # start acquisition
    ag.write("DIG")
    # wait for completion
disparaTensao(ag)
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
sleep(0.1)
ag.write("WAV:DATA?")
dados = ag.read()
sleep(.2)
retiraTensao(ag)
#print dados
t, R = lerEndian(dados)
V = convUNI(R)
plt.grid()
plt.plot(range(0, t), V)
return t, V
def disparaTensao(ag):
"""
    Sends a high-voltage pulse to the grounding system by triggering the
    first 555 timer.
    Pulses must not be sent in quick succession, since the source was not
    designed for that, so take care with sequential triggering.
    SET   - pin 68 on the U2901-60602 board
    RESET - pin 34 on the U2901-60602 board
"""
ag.write("CONF:DIG:DIR OUTP,(@501)")
ag.write("SOUR:DIG:DATA 1,(@501)")
return 0
def retiraTensao(ag):
"""
    Resets the source, enabling it to send a new high-voltage pulse.
"""
ag.write("CONF:DIG:DIR OUTP,(@501)")
ag.write("SOUR:DIG:DATA 0,(@501)") # desabilita o set
sleep(0.1) # espera um tempo para resetar
ag.write("SOUR:DIG:DATA 2,(@501)") # reseta a fonte
sleep(0.1) # espera um tempo para entrar em repouso
ag.write("SOUR:DIG:DATA 0,(@501)") # entra em repouso
return 0
def pltTensaoCorrente(V, I):
t1 = np.arange(0, len(V))
plt.figure(1)
plt.title("Leitura do U2531A")
plt.subplot(211)
plt.plot(t1, V)
plt.subplot(212)
plt.plot(t1, I)
plt.show()
def aplicaCorrecoes(V, I):
V = np.array(V)
V = FATOR_CORRECAO_TENSAO * V
I = np.array(I)
I = FATOR_CORRECAO_CORRENTE * I
return [V, I]
def sequenciaAquisicoes(ag, quantidade, local="C:\\Temp", rotulo = '0'):
"""
    Performs a sequential acquisition of the voltage and current channels.
    ag = object used to control the board
"""
print "Iniciando aquisicao sequencial"
print "Equipamento = ", ag
print "quantidade = ", quantidade
print "Tempo de inicio = ", asctime()
tempoInicio = time()
contagem = quantidade
plt.figure(1)
while quantidade > 0:
print "Atual = ", quantidade
tempoIndividual = time()
        # start acquisition
raw, V, I = lerTensaoCorrente(ag)
V, I = aplicaCorrecoes(V, I)
        # it is not a good idea to plot this way
#pltTensaoCorrente(V, I)
plt.subplot(211)
plt.plot(np.arange(0, len(V)), V)
plt.subplot(212)
plt.plot(np.arange(0, len(I)), I)
salvaTensaoTXT(local, rotulo, contagem-quantidade+1, V)
salvaCorrenteTXT(local, rotulo, contagem-quantidade+1, I)
print "Individual = ", time()-tempoIndividual
quantidade -=1
total = time()-tempoInicio
    print 'Complete in [sec]: ', total
plt.show()
return 0
def salvaTensaoTXT(local, rotulo, posicao, V):
"""
Salva o vetor tensão em um arquivo com nome formatado para isso
"""
nomeCompleto = local+"\\"+rotulo+"V"+str(posicao)+".txt"
return salvaTXT(nomeCompleto, V)
def salvaCorrenteTXT(local, rotulo, posicao, I):
"""
Salva o vetor corrente em um arquivo com nome formatado para isso
"""
nomeCompleto = local+"\\"+rotulo+"I"+str(posicao)+".txt"
return salvaTXT(nomeCompleto, I)
def salvaTXT(caminhoCompleto, vetor):
"""
Salva em um arquivo txt os valores de um vetor
onde a primeira coluna informa o indice e a segunda
coluna informa o valor para o indice.
"""
try:
arquivo = open(caminhoCompleto, 'w')
except:
print 'erro: nao foi possivel escrever no arquivo'
print ' : ', caminhoCompleto
return -1
#for i in range(len(vetor)):
# string = "%d %f\n" % (i, float(vetor[i]))
# arquivo.write(string)
for i in vetor:
arquivo.write(i)
arquivo.close()
# escrita finalizada com sucesso
return 0
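# Hedged sketch (assumption, not in the original): reading a file written by
# salvaTXT back into numpy arrays (index column and value column).
def lerTXT(caminhoCompleto):
    dados = np.loadtxt(caminhoCompleto)
    return dados[:, 0], dados[:, 1]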
def buscaAgilent():
"""
Busca o equipamento conectado a porta usb do computador
Retornando o objeto a ser usado pelas funções de controle
da placa de aquisiçao da agilent.
"""
    listaInstrumentos = visa.get_instruments_list()  # get the list of instruments connected to the computer
    listaAgilent = listaInstrumentos[0]  # take the first instrument
    print 'Instrument list:'
    print listaAgilent  # the instrument is expected to be from Agilent
    ag = visa.instrument(listaAgilent)  # create an object to be handled and passed to the other functions
identificacao = ag.ask("*IDN?")
print identificacao
return ag
###############################################################################
# MAIN #
###############################################################################
if __name__ == '__main__':
    print 'Agilent U2531A'
ag = buscaAgilent()
    ##############################
    # reading a single channel   #
    ##############################
#lerCanal103(ag)
#lerTensao(ag)
#lerCorrente(ag)
    ##########################
    # reading two channels   #
    ##########################
raw, V, I = lerTensaoCorrente(ag)
V, I = aplicaCorrecoes(V, I)
pltTensaoCorrente(V, I)
    ############################
    # sequential acquisitions  #
    ############################
    # 60 acquisitions
    # save location: "C:\Temp"
#sequenciaAquisicoes(ag, 10)
|
apache-2.0
| -3,173,579,012,957,982,000 | 25.769231 | 110 | 0.557373 | false | 2.980828 | false | false | false |
argivaitv/argivaitv
|
plugin.video.salts/salts_lib/kodi.py
|
1
|
4210
|
"""
SALTS XBMC Addon
Copyright (C) 2015 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmcaddon
import xbmcplugin
import xbmcgui
import xbmc
import xbmcvfs
import urllib
import urlparse
import sys
import os
import re
addon = xbmcaddon.Addon()
ICON_PATH = os.path.join(addon.getAddonInfo('path'), 'icon.png')
get_setting = addon.getSetting
show_settings = addon.openSettings
def get_path():
return addon.getAddonInfo('path')
def get_profile():
return addon.getAddonInfo('profile')
def set_setting(id, value):
if not isinstance(value, basestring): value = str(value)
addon.setSetting(id, value)
def get_version():
return addon.getAddonInfo('version')
def get_id():
return addon.getAddonInfo('id')
def get_name():
return addon.getAddonInfo('name')
def get_plugin_url(queries):
try:
query = urllib.urlencode(queries)
except UnicodeEncodeError:
for k in queries:
if isinstance(queries[k], unicode):
queries[k] = queries[k].encode('utf-8')
query = urllib.urlencode(queries)
return sys.argv[0] + '?' + query
def end_of_directory(cache_to_disc=True):
xbmcplugin.endOfDirectory(int(sys.argv[1]), cacheToDisc=cache_to_disc)
def create_item(queries, label, thumb='', fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
list_item = xbmcgui.ListItem(label, iconImage=thumb, thumbnailImage=thumb)
add_item(queries, list_item, fanart, is_folder, is_playable, total_items, menu_items, replace_menu)
def add_item(queries, list_item, fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
if menu_items is None: menu_items = []
if is_folder is None:
is_folder = False if is_playable else True
if is_playable is None:
playable = 'false' if is_folder else 'true'
else:
playable = 'true' if is_playable else 'false'
liz_url = get_plugin_url(queries)
if fanart: list_item.setProperty('fanart_image', fanart)
list_item.setInfo('video', {'title': list_item.getLabel()})
list_item.setProperty('isPlayable', playable)
list_item.addContextMenuItems(menu_items, replaceItems=replace_menu)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), liz_url, list_item, isFolder=is_folder, totalItems=total_items)
def parse_query(query):
q = {'mode': 'main'}
if query.startswith('?'): query = query[1:]
queries = urlparse.parse_qs(query)
for key in queries:
if len(queries[key]) == 1:
q[key] = queries[key][0]
else:
q[key] = queries[key]
return q
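# Hedged example (not part of the original add-on): round-tripping parameters
# through get_plugin_url() and parse_query(). The mode/video_id values are made
# up; inside Kodi, sys.argv[0] supplies the real plugin:// base url.
def _example_query_roundtrip():
    url = get_plugin_url({'mode': 'play', 'video_id': '42'})
    return parse_query(url.split('?', 1)[1])  # -> {'mode': 'play', 'video_id': '42'}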
def notify(header=None, msg='', duration=2000, sound=None):
if header is None: header = get_name()
if sound is None: sound = get_setting('mute_notifications') == 'false'
xbmcgui.Dialog().notification(header, msg, ICON_PATH, duration, sound)
def get_current_view():
skinPath = xbmc.translatePath('special://skin/')
xml = os.path.join(skinPath, 'addon.xml')
f = xbmcvfs.File(xml)
read = f.read()
f.close()
try: src = re.search('defaultresolution="([^"]+)', read, re.DOTALL).group(1)
except: src = re.search('<res.+?folder="([^"]+)', read, re.DOTALL).group(1)
src = os.path.join(skinPath, src, 'MyVideoNav.xml')
f = xbmcvfs.File(src)
read = f.read()
f.close()
match = re.search('<views>([^<]+)', read, re.DOTALL)
if match:
views = match.group(1)
for view in views.split(','):
if xbmc.getInfoLabel('Control.GetLabel(%s)' % (view)): return view
|
gpl-2.0
| -4,517,156,319,772,024,300 | 33.793388 | 139 | 0.670784 | false | 3.493776 | false | false | false |
jwayneroth/mpd-touch
|
pygameui/button.py
|
1
|
11634
|
import label
import callback
import theme
import pygame
import view
import imageview
CENTER = 0
LEFT = 1
RIGHT = 2
TOP = 3
BOTTOM = 4
WORD_WRAP = 0
CLIP = 1
"""
Button
A button with a text caption.
Essentially an interactive label.
Signals
on_clicked(button, mousebutton)
"""
class Button(label.Label):
def __init__(self, frame, caption, halign=CENTER, valign=CENTER, wrap=CLIP):
if frame.h == 0:
frame.h = theme.current.button_height
        label.Label.__init__(self, frame, caption, halign, valign, wrap)
self._enabled = True
self.on_clicked = callback.Signal()
def layout(self):
label.Label.layout(self)
if self.frame.w == 0:
self.frame.w = self.text_size[0] + self.padding[0] * 2
label.Label.layout(self)
def mouse_up(self, button, point):
self.on_clicked(self, button)
"""
ImageButton
A button that uses an image instead of a text caption.
"""
class ImageButton(view.View):
def __init__(self, frame, image):
if frame is None:
frame = pygame.Rect((0, 0), image.get_size())
elif frame.w == 0 or frame.h == 0:
frame.size = image.get_size()
view.View.__init__(self, frame)
self.on_clicked = callback.Signal()
self.image_view = imageview.ImageView(pygame.Rect(0, 0, 0, 0), image)
self.image_view._enabled = False
self.add_child(self.image_view)
def layout(self):
self.frame.w = self.padding[0] * 2 + self.image_view.frame.w
self.frame.h = self.padding[1] * 2 + self.image_view.frame.h
self.image_view.frame.topleft = self.padding
self.image_view.layout()
view.View.layout(self)
def mouse_up(self, button, point):
self.on_clicked(self, button)
"""
IconButton
"""
class IconButton(Button):
def __init__(self, frame, icon_class='cd', caption=''):
self.classes = {
'asterisk' : u'\u002a',
'plus' : u'\u002b',
'euro' : u'\u20ac',
'eur' : u'\u20ac',
'minus' : u'\u2212',
'cloud' : u'\u2601',
'envelope' : u'\u2709',
'pencil' : u'\u270f',
'glass' : u'\ue001',
'music' : u'\ue002',
'search' : u'\ue003',
'heart' : u'\ue005',
'star' : u'\ue006',
'star-empty' : u'\ue007',
'user' : u'\ue008',
'film' : u'\ue009',
'th-large' : u'\ue010',
'th' : u'\ue011',
'th-list' : u'\ue012',
'ok' : u'\ue013',
'remove' : u'\ue014',
'zoom-in' : u'\ue015',
'zoom-out' : u'\ue016',
'off' : u'\ue017',
'signal' : u'\ue018',
'cog' : u'\ue019',
'trash' : u'\ue020',
'home' : u'\ue021',
'file' : u'\ue022',
'time' : u'\ue023',
'road' : u'\ue024',
'download-alt' : u'\ue025',
'download' : u'\ue026',
'upload' : u'\ue027',
'inbox' : u'\ue028',
'play-circle' : u'\ue029',
'repeat' : u'\ue030',
'refresh' : u'\ue031',
'list-alt' : u'\ue032',
'lock' : u'\ue033',
'flag' : u'\ue034',
'headphones' : u'\ue035',
'volume-off' : u'\ue036',
'volume-down' : u'\ue037',
'volume-up' : u'\ue038',
'qrcode' : u'\ue039',
'barcode' : u'\ue040',
'tag' : u'\ue041',
'tags' : u'\ue042',
'book' : u'\ue043',
'bookmark' : u'\ue044',
'print' : u'\ue045',
'camera' : u'\ue046',
'font' : u'\ue047',
'bold' : u'\ue048',
'italic' : u'\ue049',
'text-height' : u'\ue050',
'text-width' : u'\ue051',
'align-left' : u'\ue052',
'align-center' : u'\ue053',
'align-right' : u'\ue054',
'align-justify' : u'\ue055',
'list' : u'\ue056',
'indent-left' : u'\ue057',
'indent-right' : u'\ue058',
'facetime-video' : u'\ue059',
'picture' : u'\ue060',
'map-marker' : u'\ue062',
'adjust' : u'\ue063',
'tint' : u'\ue064',
'edit' : u'\ue065',
'share' : u'\ue066',
'check' : u'\ue067',
'move' : u'\ue068',
'step-backward' : u'\ue069',
'fast-backward' : u'\ue070',
'backward' : u'\ue071',
'play' : u'\ue072',
'pause' : u'\ue073',
'stop' : u'\ue074',
'forward' : u'\ue075',
'fast-forward' : u'\ue076',
'step-forward' : u'\ue077',
'eject' : u'\ue078',
'chevron-left' : u'\ue079',
'chevron-right' : u'\ue080',
'plus-sign' : u'\ue081',
'minus-sign' : u'\ue082',
'remove-sign' : u'\ue083',
'ok-sign' : u'\ue084',
'question-sign' : u'\ue085',
'info-sign' : u'\ue086',
'screenshot' : u'\ue087',
'remove-circle' : u'\ue088',
'ok-circle' : u'\ue089',
'ban-circle' : u'\ue090',
'arrow-left' : u'\ue091',
'arrow-right' : u'\ue092',
'arrow-up' : u'\ue093',
'arrow-down' : u'\ue094',
'share-alt' : u'\ue095',
'resize-full' : u'\ue096',
'resize-small' : u'\ue097',
'exclamation-sign' : u'\ue101',
'gift' : u'\ue102',
'leaf' : u'\ue103',
'fire' : u'\ue104',
'eye-open' : u'\ue105',
'eye-close' : u'\ue106',
'warning-sign' : u'\ue107',
'plane' : u'\ue108',
'calendar' : u'\ue109',
'random' : u'\ue110',
'comment' : u'\ue111',
'magnet' : u'\ue112',
'chevron-up' : u'\ue113',
'chevron-down' : u'\ue114',
'retweet' : u'\ue115',
'shopping-cart' : u'\ue116',
'folder-close' : u'\ue117',
'folder-open' : u'\ue118',
'resize-vertical' : u'\ue119',
'resize-horizontal' : u'\ue120',
'hdd' : u'\ue121',
'bullhorn' : u'\ue122',
'bell' : u'\ue123',
'certificate' : u'\ue124',
'thumbs-up' : u'\ue125',
'thumbs-down' : u'\ue126',
'hand-right' : u'\ue127',
'hand-left' : u'\ue128',
'hand-up' : u'\ue129',
'hand-down' : u'\ue130',
'circle-arrow-right' : u'\ue131',
'circle-arrow-left' : u'\ue132',
'circle-arrow-up' : u'\ue133',
'circle-arrow-down' : u'\ue134',
'globe' : u'\ue135',
'wrench' : u'\ue136',
'tasks' : u'\ue137',
'filter' : u'\ue138',
'briefcase' : u'\ue139',
'fullscreen' : u'\ue140',
'dashboard' : u'\ue141',
'paperclip' : u'\ue142',
'heart-empty' : u'\ue143',
'link' : u'\ue144',
'phone' : u'\ue145',
'pushpin' : u'\ue146',
'usd' : u'\ue148',
'gbp' : u'\ue149',
'sort' : u'\ue150',
'sort-by-alphabet' : u'\ue151',
'sort-by-alphabet-alt' : u'\ue152',
'sort-by-order' : u'\ue153',
'sort-by-order-alt' : u'\ue154',
'sort-by-attributes' : u'\ue155',
'sort-by-attributes-alt' : u'\ue156',
'unchecked' : u'\ue157',
'expand' : u'\ue158',
'collapse-down' : u'\ue159',
'collapse-up' : u'\ue160',
'log-in' : u'\ue161',
'flash' : u'\ue162',
'log-out' : u'\ue163',
'new-window' : u'\ue164',
'record' : u'\ue165',
'save' : u'\ue166',
'open' : u'\ue167',
'saved' : u'\ue168',
'import' : u'\ue169',
'export' : u'\ue170',
'send' : u'\ue171',
'floppy-disk' : u'\ue172',
'floppy-saved' : u'\ue173',
'floppy-remove' : u'\ue174',
'floppy-save' : u'\ue175',
'floppy-open' : u'\ue176',
'credit-card' : u'\ue177',
'transfer' : u'\ue178',
'cutlery' : u'\ue179',
'header' : u'\ue180',
'compressed' : u'\ue181',
'earphone' : u'\ue182',
'phone-alt' : u'\ue183',
'tower' : u'\ue184',
'stats' : u'\ue185',
'sd-video' : u'\ue186',
'hd-video' : u'\ue187',
'subtitles' : u'\ue188',
'sound-stereo' : u'\ue189',
'sound-dolby' : u'\ue190',
'sound-5-1' : u'\ue191',
'sound-6-1' : u'\ue192',
'sound-7-1' : u'\ue193',
'copyright-mark' : u'\ue194',
'registration-mark' : u'\ue195',
'cloud-download' : u'\ue197',
'cloud-upload' : u'\ue198',
'tree-conifer' : u'\ue199',
'tree-deciduous' : u'\ue200',
'cd' : u'\ue201',
'save-file' : u'\ue202',
'open-file' : u'\ue203',
'level-up' : u'\ue204',
'copy' : u'\ue205',
'paste' : u'\ue206',
'alert' : u'\ue209',
'equalizer' : u'\ue210',
'king' : u'\ue211',
'queen' : u'\ue212',
'pawn' : u'\ue213',
'bishop' : u'\ue214',
'knight' : u'\ue215',
'baby-formula' : u'\ue216',
'tent' : u'\u26fa',
'blackboard' : u'\ue218',
'bed' : u'\ue219',
'apple' : u'\uf8ff',
'erase' : u'\ue221',
'hourglass' : u'\u231b',
'lamp' : u'\ue223',
'duplicate' : u'\ue224',
'piggy-bank' : u'\ue225',
'scissors' : u'\ue226',
'bitcoin' : u'\ue227',
'btc' : u'\ue227',
'xbt' : u'\ue227',
'yen' : u'\u00a5',
'jpy' : u'\u00a5',
'ruble' : u'\u20bd',
'rub' : u'\u20bd',
'scale' : u'\ue230',
'ice-lolly' : u'\ue231',
'ice-lolly-tasted' : u'\ue232',
'education' : u'\ue233',
'option-horizontal' : u'\ue234',
'option-vertical' : u'\ue235',
'menu-hamburger' : u'\ue236',
'modal-window' : u'\ue237',
'oil' : u'\ue238',
'grain' : u'\ue239',
'sunglasses' : u'\ue240',
'text-size' : u'\ue241',
'text-color' : u'\ue242',
'text-background' : u'\ue243',
'object-align-top' : u'\ue244',
'object-align-bottom' : u'\ue245',
'object-align-horizontal': u'\ue246',
'object-align-left' : u'\ue247',
'object-align-vertical' : u'\ue248',
'object-align-right' : u'\ue249',
'triangle-right' : u'\ue250',
'triangle-left' : u'\ue251',
'triangle-bottom' : u'\ue252',
'triangle-top' : u'\ue253',
'console' : u'\ue254',
'superscript' : u'\ue255',
'subscript' : u'\ue256',
'menu-left' : u'\ue257',
'menu-right' : u'\ue258',
'menu-down' : u'\ue259',
'menu-up' : u'\ue260'
}
self._icon_class = icon_class
caption = self.get_caption( icon_class )
Button.__init__(self, frame, caption)
def __repr__(self):
if hasattr(self, 'tag_name'):
return self.tag_name + ' icon'
if self._icon_class is None:
return ''
return self._icon_class + ' icon'
@property
def icon_class(self):
return self._icon_class
@icon_class.setter
def icon_class(self, icon_class):
self._icon_class = icon_class
caption = self.get_caption( icon_class )
self.text = caption
self.render()
def get_caption(self, class_name):
if class_name in self.classes:
return self.classes[class_name]
return self.classes['cd']
def _render(self, text):
self.text_surfaces, self.text_shadow_surfaces = [], []
#wants_shadows = (self.text_shadow_color is not None and
# self.text_shadow_offset is not None)
self.text_size = self._render_line(self._text, None)
def _render_line(self, line_text, wants_shadows):
#line_text = u'\u002a'
try:
text_surface = self.font.render(line_text, True, self.text_color)
self.text_surfaces.append(text_surface)
if wants_shadows:
text_shadow_surface = self.font.render(line_text, True, self.text_shadow_color)
self.text_shadow_surfaces.append(text_shadow_surface)
return text_surface.get_size()
except:
return (0,0)
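# Hedged usage sketch (not from the original library): constructing an IconButton
# and switching its glyph at runtime through the icon_class property above. The
# 48x48 frame size is an arbitrary assumption.
def _example_icon_button():
    btn = IconButton(pygame.Rect(0, 0, 48, 48), icon_class='play')
    btn.icon_class = 'pause'  # re-renders the caption via the property setter
    return btn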
"""
NavIconButton
"""
class NavIconButton(IconButton):
def __init__(self, frame, icon_class='cd', caption=''):
IconButton.__init__(self, frame, icon_class, caption)
"""
DialogButton
"""
class DialogButton(IconButton):
def __init__(self, frame, icon_class='cd', caption=''):
IconButton.__init__(self, frame, icon_class, caption)
|
mit
| -5,597,498,205,768,523,000 | 27.101449 | 83 | 0.514097 | false | 2.448748 | false | false | false |
ABI-Software/ZincView
|
src/zincview.py
|
1
|
30614
|
#!/usr/bin/python
"""
ZincView example visualisation application using OpenCMISS-Zinc, python, Qt (PySide)
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
import os
import sys
import json
from PySide import QtGui, QtCore
from zincview_ui import Ui_ZincView
from opencmiss.zinc.context import Context as ZincContext
from opencmiss.zinc.scenecoordinatesystem import *
from opencmiss.zinc.result import RESULT_OK
from opencmiss.zinc.field import Field
def ZincRegion_getMeshSize(region, dimension):
'''
Get the number of elements of given dimension in the region and all its child regions.
:return meshSize
'''
fieldmodule = region.getFieldmodule()
mesh = fieldmodule.findMeshByDimension(dimension)
meshSize = mesh.getSize()
# recurse children
child = region.getFirstChild()
while child.isValid():
meshSize = meshSize + ZincRegion_getMeshSize(child, dimension)
child = child.getNextSibling()
return meshSize
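# Hedged sketch (assumption, not in the original): a small logging helper built
# on ZincRegion_getMeshSize, e.g. to report model statistics after loading.
def ZincRegion_printMeshSizes(region):
    for dimension in (3, 2, 1):
        print("%dD elements: %d" % (dimension, ZincRegion_getMeshSize(region, dimension)))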
def ZincRegion_getTimeRange(region):
'''
Recursively get the time range of finite element field parameters in region, or any child regions
:return minimum, maximum or None, None if no range
'''
minimum = None
maximum = None
# it's not easy to get the range of time; assume all nodes have same
# time range, and use timesequence from first node field with one.
# One problem is that often the last time represents the start of an
# increment, so the new range should be higher, which matters if animating
fieldmodule = region.getFieldmodule()
for fieldDomainType in [Field.DOMAIN_TYPE_NODES, Field.DOMAIN_TYPE_DATAPOINTS]:
nodeset = fieldmodule.findNodesetByFieldDomainType(fieldDomainType)
nodeiter = nodeset.createNodeiterator()
node = nodeiter.next()
        if node.isValid():
fielditer = fieldmodule.createFielditerator()
field = fielditer.next()
while field.isValid():
feField = field.castFiniteElement()
if feField.isValid():
nodetemplate = nodeset.createNodetemplate()
nodetemplate.defineFieldFromNode(feField, node)
timesequence = nodetemplate.getTimesequence(feField)
if timesequence.isValid():
count = timesequence.getNumberOfTimes()
if count > 0:
thisMinimum = timesequence.getTime(1)
thisMaximum = timesequence.getTime(count)
if minimum is None:
minimum = thisMinimum
maximum = thisMaximum
elif thisMinimum < minimum:
minimum = thisMinimum
elif thisMaximum > maximum:
maximum = thisMaximum
field = fielditer.next()
# recurse children
child = region.getFirstChild()
while child.isValid():
thisMinimum, thisMaximum = ZincRegion_getTimeRange(child)
if thisMinimum is not None:
if minimum is None:
minimum = thisMinimum
maximum = thisMaximum
elif thisMinimum < minimum:
minimum = thisMinimum
elif thisMaximum > maximum:
maximum = thisMaximum
child = child.getNextSibling()
return minimum, maximum
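# Hedged sketch (assumption): using the time-range helper above to decide whether
# a loaded model is time-varying, mirroring the 0.001 tolerance used later in
# exportSceneViewersettings.
def ZincRegion_isTimeVarying(region, tolerance=0.001):
    minimum, maximum = ZincRegion_getTimeRange(region)
    return (minimum is not None) and ((maximum - minimum) > tolerance)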
class ZincView(QtGui.QMainWindow):
'''
Create a subclass of QMainWindow to get menu bar functionality.
'''
def __init__(self, parent=None):
        '''
        Initialise the ZincView, first calling the QMainWindow __init__ function.
        '''
QtGui.QMainWindow.__init__(self, parent)
self._context = ZincContext("ZincView")
self._rootRegion = self._context.createRegion()
# set up standard materials and glyphs so we can use them elsewhere
materialmodule = self._context.getMaterialmodule()
materialmodule.defineStandardMaterials()
glyphmodule = self._context.getGlyphmodule()
glyphmodule.defineStandardGlyphs()
# Using composition to include the visual element of the GUI.
self.ui = Ui_ZincView()
self.ui.setupUi(self)
self.ui.toolBox.setCurrentIndex(0)
self.ui.sceneviewerwidget.setContext(self._context)
self.ui.sceneviewerwidget.graphicsInitialized.connect(self._graphicsInitialized)
self.setWindowIcon(QtGui.QIcon(":/cmiss_icon.ico"))
def _graphicsInitialized(self):
'''
Callback for when SceneviewerWidget is initialised
Set up additional sceneviewer notifiers for updating widgets
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
sceneviewer.setScene(self._rootRegion.getScene())
self.ui.sceneviewerwidget.setSelectModeAll()
self.ui.sceneviewer_editor_widget.setSceneviewer(sceneviewer)
self.allSettingsUpdate()
def modelClear(self):
'''
Clear all subregions, meshes, nodesets, fields and graphics
'''
msgBox = QtGui.QMessageBox()
msgBox.setWindowTitle("ZincView")
msgBox.setText("Clear will destroy the model and all graphics.")
msgBox.setInformativeText("Proceed?")
msgBox.setStandardButtons(QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel)
msgBox.setDefaultButton(QtGui.QMessageBox.Cancel)
result = msgBox.exec_()
if result == QtGui.QMessageBox.Cancel:
return
self._rootRegion = self._context.createRegion()
self.ui.region_chooser.setRootRegion(self._rootRegion)
scene = self._rootRegion.getScene()
self.ui.scene_editor.setScene(scene)
self.ui.sceneviewerwidget.getSceneviewer().setScene(scene)
self.allSettingsUpdate()
def modelLoad(self):
'''
Read model file or run script to read or define model.
'''
fileNameTuple = QtGui.QFileDialog.getOpenFileName(self, "Load ZincView Model", "", "ZincView scripts (*.zincview.py);;Model Files (*.ex* *.fieldml)")
inputScriptFileName = fileNameTuple[0]
fileFilter = fileNameTuple[1]
if not inputScriptFileName:
return
#print("reading file " + inputScriptFileName + ", filter " + fileFilter)
# set current directory to path from file, to support scripts and fieldml with external resources
path = os.path.dirname(inputScriptFileName)
os.chdir(path)
if "scripts" in fileFilter:
try:
# f = open(inputScriptFileName, 'r')
# myfunctions = {}
# exec f in myfunctions
# success = myfunctions['loadModel'](self._rootRegion)
sys.path.append(path)
_, filename = os.path.split(inputScriptFileName)
mod_name, _ = os.path.splitext(filename)
import importlib.util
spec = importlib.util.spec_from_file_location(mod_name, inputScriptFileName)
foo = importlib.util.module_from_spec(spec)
spec.loader.exec_module(foo)
success = foo.loadModel(self._rootRegion)
except:
success = False
else:
result = self._rootRegion.readFile(inputScriptFileName)
success = (result == RESULT_OK)
if not success:
msgBox = QtGui.QMessageBox()
msgBox.setWindowTitle("ZincView")
msgBox.setText("Error reading file: " + inputScriptFileName)
msgBox.setStandardButtons(QtGui.QMessageBox.Ok)
msgBox.setDefaultButton(QtGui.QMessageBox.Cancel)
result = msgBox.exec_()
return
scene = self._rootRegion.getScene()
# ensure scene editor graphics list is redisplayed, and widgets are updated
self.ui.scene_editor.setScene(scene)
self.ui.region_chooser.setRootRegion(self._rootRegion)
self.allSettingsUpdate()
self.viewAll()
def toolBoxPageChanged(self, page):
# enable view widget updates only when looking at them
self.ui.sceneviewer_editor_widget.setEnableUpdates(page == 2)
def _displayReal(self, widget, value):
'''
Display real value in a widget
'''
newText = '{:.5g}'.format(value)
widget.setText(newText)
def _displayScaleInteger(self, widget, values, numberFormat = '{:d}'):
'''
Display vector of integer values in a widget, separated by '*'
'''
newText = "*".join(numberFormat.format(value) for value in values)
widget.setText(newText)
def _parseScaleInteger(self, widget):
        '''
        Return integer vector from '*'-separated text in line edit widget
        '''
text = widget.text()
values = [int(value) for value in text.split('*')]
if len(values) < 1:
raise
return values
def allSettingsUpdate(self):
'''
Show initial values on widgets
'''
self.tessellationMinimumDivisionsDisplay()
self.tessellationRefinementFactorsDisplay()
self.tessellationCircleDivisionsDisplay()
self.spectrumMinimumDisplay()
self.spectrumMaximumDisplay()
self.timeMinimumDisplay()
self.timeMaximumDisplay()
self.timeTextDisplay()
self.timeSliderDisplay()
def regionChanged(self, int):
region = self.ui.region_chooser.getRegion()
self.ui.scene_editor.setScene(region.getScene())
def viewAll(self):
'''
Change sceneviewer to see all of scene.
'''
self.ui.sceneviewer_editor_widget.viewAll()
def _checkTessellationDivisions(self, minimumDivisions, refinementFactors, widget):
'''
Check total divisions not too high or get user confirmation
Call with both of the vectors set, each must have at least one component.
Returns True if can apply.
'''
limit = 100000 # max elements*totalsize for each dimension
min = 1
ref = 1
totalDivisions = [1,1,1]
totalSize3d = 1
for i in range(3):
if i < len(minimumDivisions):
min = minimumDivisions[i]
if i < len(refinementFactors):
ref = refinementFactors[i]
totalDivisions[i] = min*ref
totalSize3d = totalSize3d*min*ref
totalSize2d = totalDivisions[0]*totalDivisions[1]
if totalDivisions[1]*totalDivisions[2] > totalSize2d:
totalSize2d = totalDivisions[1]*totalDivisions[2]
if totalDivisions[2]*totalDivisions[0] > totalSize2d:
totalSize2d = totalDivisions[2]*totalDivisions[0]
totalSize1d = totalDivisions[0]
if totalDivisions[1] > totalSize1d:
totalSize1d = totalDivisions[1]
if totalDivisions[2] > totalSize1d:
totalSize1d = totalDivisions[2]
meshSize3d = ZincRegion_getMeshSize(self._rootRegion, 3)
limit3d = limit
if limit3d < meshSize3d:
limit3d = meshSize3d
overLimit3d = totalSize3d*meshSize3d > limit3d
meshSize2d = ZincRegion_getMeshSize(self._rootRegion, 2)
limit2d = limit
if limit2d < meshSize2d:
limit2d = meshSize2d
overLimit2d = totalSize2d*meshSize2d > limit2d
meshSize1d = ZincRegion_getMeshSize(self._rootRegion, 1)
limit1d = limit
if limit1d < meshSize1d:
limit1d = meshSize1d
overLimit1d = totalSize1d*meshSize1d > limit1d
if not (overLimit1d or overLimit2d or overLimit3d):
return True
widget.blockSignals(True)
msgBox = QtGui.QMessageBox()
msgBox.setWindowTitle("ZincView")
divisionsText = "*".join('{:d}'.format(value) for value in totalDivisions)
msgBox.setText("Fine tessellation divisions " + divisionsText + " can take a long time to apply.")
msgBox.setInformativeText("Please confirm action.")
msgBox.setStandardButtons(QtGui.QMessageBox.Apply | QtGui.QMessageBox.Cancel)
msgBox.setDefaultButton(QtGui.QMessageBox.Cancel)
result = msgBox.exec_()
widget.blockSignals(False)
return result == QtGui.QMessageBox.Apply
def tessellationMinimumDivisionsDisplay(self):
'''
Display the current tessellation minimum divisions
'''
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, minimumDivisions = tessellation.getMinimumDivisions(3)
self._displayScaleInteger(self.ui.tessellation_minimum_divisions_lineedit, minimumDivisions)
def tessellationMinimumDivisionsEntered(self):
'''
Set default tessellation minimum divisions from values in widget
'''
try:
minimumDivisions = self._parseScaleInteger(self.ui.tessellation_minimum_divisions_lineedit)
# pack to length 3 for comparing with old values
while len(minimumDivisions) < 3:
minimumDivisions.append(minimumDivisions[-1])
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, oldMinimumDivisions = tessellation.getMinimumDivisions(3)
if minimumDivisions != oldMinimumDivisions:
result, refinementFactors = tessellation.getRefinementFactors(3)
if self._checkTessellationDivisions(minimumDivisions, refinementFactors, self.ui.tessellation_minimum_divisions_lineedit):
if RESULT_OK != tessellation.setMinimumDivisions(minimumDivisions):
raise
except:
print("Invalid tessellation minimum divisions")
#self.tessellationMinimumDivisionsDisplay()
def tessellationRefinementFactorsDisplay(self):
'''
Display the current tessellation refinement factors
'''
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, refinementFactors = tessellation.getRefinementFactors(3)
self._displayScaleInteger(self.ui.tessellation_refinement_factors_lineedit, refinementFactors)
def tessellationRefinementFactorsEntered(self):
'''
Set default tessellation refinement factors from values in widget
'''
try:
refinementFactors = self._parseScaleInteger(self.ui.tessellation_refinement_factors_lineedit)
# pack to length 3 for comparing with old values
while len(refinementFactors) < 3:
refinementFactors.append(refinementFactors[-1])
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, oldRefinementFactors = tessellation.getRefinementFactors(3)
if refinementFactors != oldRefinementFactors:
result, minimumDivisions = tessellation.getMinimumDivisions(3)
if self._checkTessellationDivisions(minimumDivisions, refinementFactors, self.ui.tessellation_refinement_factors_lineedit):
if RESULT_OK != tessellation.setRefinementFactors(refinementFactors):
raise
except:
print("Invalid tessellation refinement factors")
#self.tessellationRefinementFactorsDisplay()
def tessellationCircleDivisionsDisplay(self):
'''
Display the current tessellation circle divisions
'''
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
circleDivisions = tessellation.getCircleDivisions()
self.ui.tessellation_circle_divisions_lineedit.setText(str(circleDivisions))
def tessellationCircleDivisionsEntered(self):
'''
Set tessellation circle divisions from values in widget
'''
try:
circleDivisions = int(self.ui.tessellation_circle_divisions_lineedit.text())
tessellationmodule = self._context.getTessellationmodule()
# set circle divisions for all tessellation in module
result = RESULT_OK
tessellationmodule.beginChange()
iter = tessellationmodule.createTessellationiterator()
tessellation = iter.next()
while tessellation.isValid():
result = tessellation.setCircleDivisions(circleDivisions)
if RESULT_OK != result:
break # can't raise here otherwise no call to endChange()
tessellation = iter.next()
tessellationmodule.endChange()
if RESULT_OK != result:
raise
except:
print("Invalid tessellation circle divisions")
#self.tessellationCircleDivisionsDisplay()
def perturbLinesStateChanged(self, state):
'''
Set perturb lines flag from checkbox
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
sceneviewer.setPerturbLinesFlag(state)
def spectrumAutorangeClicked(self):
'''
Set spectrum min/max to fit range of visible data in scene graphics.
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
scene = sceneviewer.getScene()
filter = sceneviewer.getScenefilter()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
result, minimum, maximum = scene.getSpectrumDataRange(filter, spectrum, 1)
if result >= 1: # result is number of components with range, can exceed 1
spectrummodule.beginChange()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
spectrumcomponent.setRangeMinimum(minimum)
spectrumcomponent.setRangeMaximum(maximum)
spectrummodule.endChange()
self.spectrumMinimumDisplay()
self.spectrumMaximumDisplay()
def spectrumMinimumDisplay(self):
'''
Display the current default spectrum minimum
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
minimum = spectrumcomponent.getRangeMinimum()
self._displayReal(self.ui.spectrum_minimum_lineedit, minimum)
def spectrumMinimumEntered(self):
'''
Set default spectrum minimum from value in the widget
'''
try:
minimum = float(self.ui.spectrum_minimum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
if RESULT_OK != spectrumcomponent.setRangeMinimum(minimum):
raise
except:
print("Invalid spectrum minimum")
self.spectrumMinimumDisplay()
def spectrumMaximumDisplay(self):
'''
Display the current default spectrum maximum
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
maximum = spectrumcomponent.getRangeMaximum()
self._displayReal(self.ui.spectrum_maximum_lineedit, maximum)
def spectrumMaximumEntered(self):
'''
Set default spectrum maximum from value in the widget
'''
try:
maximum = float(self.ui.spectrum_maximum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
if RESULT_OK != spectrumcomponent.setRangeMaximum(maximum):
raise
except:
print("Invalid spectrum maximum")
self.spectrumMaximumDisplay()
def spectrumAddColourBarClicked(self):
'''
Add an overlay graphics showing the default spectrum colour bar.
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
scene = sceneviewer.getScene()
scene.beginChange()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
glyphmodule = scene.getGlyphmodule()
glyphmodule.beginChange()
colourbar = glyphmodule.findGlyphByName("colourbar")
if not colourbar.isValid():
colourbar = glyphmodule.createGlyphColourBar(spectrum)
colourbar.setName("colourbar")
glyphmodule.endChange()
graphics = scene.findGraphicsByName("colourbar")
if graphics.isValid():
scene.removeGraphics(graphics)
graphics = scene.createGraphicsPoints()
graphics.setName("colourbar")
graphics.setScenecoordinatesystem(SCENECOORDINATESYSTEM_NORMALISED_WINDOW_FIT_LEFT)
pointattributes = graphics.getGraphicspointattributes()
pointattributes.setGlyph(colourbar)
pointattributes.setBaseSize([1.0,1.0,1.0])
pointattributes.setGlyphOffset([-0.9,0.0,0.0])
scene.endChange()
# ensure scene editor graphics list is redisplayed
self.ui.scene_editor.setScene(scene)
def timeAutorangeClicked(self):
'''
Set time min/max to time range of finite element field parameters.
'''
minimum, maximum = ZincRegion_getTimeRange(self._rootRegion)
if minimum is None:
minimum = 0.0
maximum = 0.0
timekeepermodule = self._context.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
timekeeper.setMinimumTime(minimum)
timekeeper.setMaximumTime(maximum)
self.timeMinimumDisplay()
self.timeMaximumDisplay()
currentTime = timekeeper.getTime()
if currentTime < minimum:
timekeeper.setTime(minimum)
elif currentTime > maximum:
timekeeper.setTime(maximum)
self.timeTextDisplay()
self.timeSliderDisplay()
def timeMinimumDisplay(self):
'''
Display the current default timekeeper minimum time
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
self._displayReal(self.ui.time_minimum_lineedit, minimum)
def timeMinimumEntered(self):
'''
Set default timekeeper minimum time from value in the widget
'''
try:
minimum = float(self.ui.time_minimum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
if RESULT_OK != timekeeper.setMinimumTime(minimum):
raise
except:
print("Invalid minimum time")
self.timeMinimumDisplay()
def timeMaximumDisplay(self):
'''
Display the current default timekeeper maximum time
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
maximum = timekeeper.getMaximumTime()
self._displayReal(self.ui.time_maximum_lineedit, maximum)
def timeMaximumEntered(self):
'''
Set default timekeeper maximum time from value in the widget
'''
try:
maximum = float(self.ui.time_maximum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
if RESULT_OK != timekeeper.setMaximumTime(maximum):
raise
except:
print("Invalid maximum time")
self.timeMaximumDisplay()
def timeTextDisplay(self):
'''
Display the default timekeeper current time
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
time = timekeeper.getTime()
self._displayReal(self.ui.time_text_lineedit, time)
def timeTextEntered(self):
'''
Set default timekeeper current time from value in the widget
'''
try:
time = float(self.ui.time_text_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
if RESULT_OK != timekeeper.setTime(time):
raise
self.timeSliderDisplay()
except:
print("Invalid current time")
self.timeTextDisplay()
def timeSliderDisplay(self):
'''
Display the default timekeeper current time on the time slider
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
time = timekeeper.getTime()
# don't want signal for my change
self.ui.time_slider.blockSignals(True)
if maximum != minimum:
value = int(time*(10000.999/(maximum - minimum)))
else:
value = 0
self.ui.time_slider.setValue(value)
self.ui.time_slider.blockSignals(False)
def timeSliderChanged(self, value):
        '''
        Set default timekeeper current time from the slider position
        '''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
if maximum != minimum:
time = float(value)*((maximum - minimum)/10000.0)
else:
time = minimum
timekeeper.setTime(time)
self.timeTextDisplay()
def saveImageClicked(self):
'''
Save the view in the window to an image file.
'''
fileNameTuple = QtGui.QFileDialog.getSaveFileName(self, "Save image", "", "Image files (*.jpg *.png *.tif *.*)")
fileName = fileNameTuple[0]
if not fileName:
return
image = self.ui.sceneviewerwidget.grabFrameBuffer()
image.save(fileName)
def exportSceneViewersettings(self, outputPrefix, numberOfResources):
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
si = scene.createStreaminformationScene()
si.setIOFormat(si.IO_FORMAT_THREEJS)
si.setIODataType(si.IO_FORMAT_THREEJS)
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
time_enabled = 0
if (maximum - minimum) > 0.001:
time_enabled = 1
sv = self.ui.sceneviewerwidget.getSceneviewer()
sv.viewAll()
nearPlane = sv.getNearClippingPlane()
farPlane = sv.getFarClippingPlane()
result, eyePos, lookat, upVector = sv.getLookatParameters()
obj = { "nearPlane": nearPlane, "farPlane": farPlane, "eyePosition": eyePos, "targetPosition": lookat, "upVector": upVector, "numberOfResources": numberOfResources, "timeEnabled" : time_enabled}
outputName = outputPrefix + "_view.json"
export_f = open(outputName, "wb+")
export_f.write(json.dumps(obj))
export_f.close()
def exportScene(self, outputPrefix):
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
si = scene.createStreaminformationScene()
si.setIOFormat(si.IO_FORMAT_THREEJS)
si.setIODataType(si.IO_FORMAT_THREEJS)
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
if (maximum - minimum) > 0.0:
si.setInitialTime(minimum)
si.setFinishTime(maximum)
si.setNumberOfTimeSteps(51)
number = si.getNumberOfResourcesRequired()
i = 0
srs = []
while i < number:
outputName = outputPrefix + "_" + str(i + 1) + ".json"
srs.append(si.createStreamresourceFile(outputName))
i = i + 1
scene.exportScene(si)
return number
def saveWebGLClicked(self):
'''
Save the view in the window to WebGL content.
'''
fileNameTuple = QtGui.QFileDialog.getSaveFileName(self, "Specify prefix", "")
fileName = fileNameTuple[0]
if not fileName:
return
#print("reading file", fileName, ", filter", fileFilter)
# set current directory to path from file, to support scripts and fieldml with external resources
# Not implemented
numberOfResources = self.exportScene(fileName)
self.exportSceneViewersettings(fileName, numberOfResources)
# main start
def main(argv):
'''
The entry point for the application, handle application arguments and initialise the
GUI.
'''
app = QtGui.QApplication(argv)
w = ZincView()
w.show()
sys.exit(app.exec_())
# main end
if __name__ == '__main__':
main(sys.argv)
|
mpl-2.0
| -1,349,197,757,580,551,200 | 40.822404 | 202 | 0.641471 | false | 4.339949 | false | false | false |
dmishin/knuth_bendix
|
vondyck.py
|
1
|
4552
|
from knuth_bendix import knuthBendix, RewriteRuleset, shortLex
def vdRule(n, m, k=2):
"""Create initial ruleset for von Dyck group with inverse elements
https://en.wikipedia.org/wiki/Triangle_group#von_Dyck_groups
"""
return RewriteRuleset({
tuple('aA'): (),
tuple('Aa'): (),
tuple('bB'): (),
tuple('Bb'): (),
tuple('BA'*k): (),
tuple('ab'*k): (),
tuple( 'A'*n ): (),
tuple( 'a'*n ): (),
tuple( 'B'*m ): (),
tuple( 'b'*m ): () })
def vdNoInverse(n, m, k=2):
return RewriteRuleset({
tuple('ab'*k): (),
tuple( 'a'*n ): (),
tuple( 'b'*m ): () })
def groupPowers(s):
last = None
lastPow = None
for x in s:
if last is None:
last = x
lastPow = 1
else:
if x == last:
lastPow += 1
else:
yield last, lastPow
last = x
lastPow = 1
if last is not None:
yield last, lastPow
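# Hedged example (not in the original): groupPowers collapses runs of equal
# symbols, e.g. list(groupPowers("aabBB")) == [('a', 2), ('b', 1), ('B', 2)].
def _example_group_powers():
    return list(groupPowers("aabBB"))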
def groupedShortLex(s1, s2):
p1 = tuple(groupPowers(s1))
p2 = tuple(groupPowers(s2))
print ("####", s1,p1," <> ", s2,p2)
return shortLex( p1, p2)
def groupPowersVd(s):
for x, p in groupPowers(s):
if x.upper() == x:
yield x.lower(), -p
else:
yield x, p
def showGroupedPowers(s):
if not s: return "e"
return " ".join( (x if p == 1 else x+"^"+str(p))
for x, p in groupPowersVd(s))
def printVDRules(rules1):
print ("{")
for v,w in rules1._sortedItems():
print(" {sv}\t-> {sw}".format(sv = showGroupedPowers(v),
sw = showGroupedPowers(w)))
print ("}")
import itertools
def powers(n):
#powers from n // 2 to n//2-n, excluding 0
for a,b in itertools.zip_longest( range(1, n-n//2+1),
range(1, n//2+1)):
if a is not None: yield a
if b is not None: yield -b
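# Hedged example (not in the original): powers(n) yields the non-zero exponents
# used by powerVondyck, alternating sign, e.g. list(powers(4)) == [1, -1, 2, -2]
# and list(powers(5)) == [1, -1, 2, -2, 3].
def _example_powers():
    return list(powers(4)), list(powers(5))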
def powerVondyck(n, m):
""" each element of double VD group is some power of the original VD elements.
powers are orderede from highest to lowest"""
elements = []
for p in reversed(list(powers(n))):
elements.append(('a', p))
for p in reversed(list(powers(m))):
elements.append(('b', p))
element2index = {e:i for i, e in enumerate(elements)}
a = element2index[('a', 1)]
ia = element2index[('a', -1)]
b = element2index[('b', 1)]
ib = element2index[('b', -1)]
def showElement(i):
a, p = elements[i]
return "%s^%s"%(a,p)
relations = {}
#generate identities.
# powers
for i1,(c1, p1) in enumerate(elements):
for i2, (c2, p2) in enumerate(elements):
if c1 != c2: continue
order = n if c1 == 'a' else m
ps = (p1 + p2 + order//2)%order - order //2
print ("#####", showElement(i1),"*",showElement(i2),"=",ps)
if ps == 0:
relations[(i1,i2)] = ()
else:
relations[(i1,i2)] = (element2index[(c1, ps)],)
# special identities:
# abab = e
# BABA = e
relations[(a,b,a,b)] = ()
relations[(ib,ia,ib,ia)] = ()
return RewriteRuleset(relations), showElement
if __name__=="__main__":
#rules = vdNoInverse( 4, 4)
import sys
try:
p = int(sys.argv[1])
q = int(sys.argv[2])
except IndexError:
print("Usage: vondyck p q")
exit(1)
print ("========== Rule for vD({p},{q},2) ==========".format(**locals()))
rules = vdRule(p, q)
showElem = str
#rules, showElem = powerVondyck(p, q)
def showProgress(i, s):
#print ("Iteration {i}, ruleset size: {n}".format(i=i,n=s.size()))
pass
rules1 = knuthBendix (rules, onIteration=showProgress, maxRulesetSize=10000)
for i,(v,w) in enumerate(rules1._sortedItems()):
print("{i:2}) {sv}\t-> {sw}".format(i=i+1,
sv = showGroupedPowers(v),
sw = showGroupedPowers(w)))
if True:
from automaton import *
automaton, initial_state = build_accepting_automaton( 'abAB', list(rules1.suffices()) )
#symbolic growth func
print("Growth function:")
func = automaton_growth_func(automaton, initial_state)
import sympy
func = sympy.cancel(func)
print(func)
|
mit
| -8,688,118,971,699,404,000 | 26.257485 | 95 | 0.491872 | false | 3.332357 | false | false | false |
cgarrard/osgeopy-code
|
Chapter13/listing13_4.py
|
1
|
1939
|
# Script to draw world countries as patches.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
from osgeo import ogr
def order_coords(coords, clockwise):
"""Orders coordinates."""
total = 0
x1, y1 = coords[0]
for x, y in coords[1:]:
total += (x - x1) * (y + y1)
x1, y1 = x, y
x, y = coords[0]
total += (x - x1) * (y + y1)
is_clockwise = total > 0
if clockwise != is_clockwise:
coords.reverse()
return coords
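# Hedged example (not from the book listing): the shoelace-style sum above makes
# order_coords reverse a counter-clockwise ring when clockwise ordering is requested.
def _example_order_coords():
    square_ccw = [(0, 0), (1, 0), (1, 1), (0, 1)]
    return order_coords(square_ccw, True)  # -> [(0, 1), (1, 1), (1, 0), (0, 0)]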
def make_codes(n):
"""Makes a list of path codes."""
codes = [Path.LINETO] * n
codes[0] = Path.MOVETO
return codes
def plot_polygon_patch(poly, color):
"""Plots a polygon as a patch."""
# Outer clockwise path.
coords = poly.GetGeometryRef(0).GetPoints()
coords = order_coords(coords, True)
codes = make_codes(len(coords))
for i in range(1, poly.GetGeometryCount()):
# Inner counter-clockwise paths.
coords2 = poly.GetGeometryRef(i).GetPoints()
coords2 = order_coords(coords2, False)
codes2 = make_codes(len(coords2))
# Concatenate the paths.
coords = np.concatenate((coords, coords2))
codes = np.concatenate((codes, codes2))
# Add the patch to the plot
path = Path(coords, codes)
patch = patches.PathPatch(path, facecolor=color)
plt.axes().add_patch(patch)
# Loop through all of the features in the countries layer and create
# patches for the polygons.
ds = ogr.Open(r'D:\osgeopy-data\global\ne_110m_admin_0_countries.shp')
lyr = ds.GetLayer(0)
for row in lyr:
geom = row.geometry()
if geom.GetGeometryType() == ogr.wkbPolygon:
plot_polygon_patch(geom, 'yellow')
elif geom.GetGeometryType() == ogr.wkbMultiPolygon:
for i in range(geom.GetGeometryCount()):
plot_polygon_patch(geom.GetGeometryRef(i), 'yellow')
plt.axis('equal')
plt.show()
|
mit
| 7,704,621,990,206,990,000 | 29.777778 | 70 | 0.643115 | false | 3.378049 | false | false | false |
aleneum/kogniserver
|
src/kogniserver/async.py
|
1
|
2307
|
import logging
import os
from threading import Thread
import time
try:
import asyncio
except ImportError:
# Trollius >= 0.3 was renamed
import trollius as asyncio
from autobahn.asyncio.wamp import ApplicationSession
from services import SessionHandler
class Ping(Thread):
def __init__(self, wamp):
Thread.__init__(self)
self.running = True
self.wamp = wamp
def run(self):
try:
while self.running:
logging.debug("ping")
self.wamp.publish(u'com.wamp.ping', "ping")
time.sleep(1)
except Exception as e:
logging.debug(e)
raise e
class Component(ApplicationSession):
@staticmethod
def on_ping(event):
logging.debug(event)
@asyncio.coroutine
def onJoin(self, details):
if os.environ.get('DEBUG') in ['1','True','true','TRUE']:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig()
logging.getLogger().setLevel(log_level)
self.session = SessionHandler(self, log_level)
# register RPC
reg = yield self.register(self.session.register_scope, u'service.displayserver.register')
rpc = yield self.register(self.session.call_rpc, u'service.displayserver.call')
# setup ping
sub = yield self.subscribe(self.on_ping, u'com.wamp.ping')
self.ping = Ping(self)
self.ping.start()
print 'kogniserver(asyncio) started...'
def onLeave(self, details):
self.ping.running = False
while self.ping.isAlive():
time.sleep(0.1)
self.session.quit()
print "kogniserver session left..."
def main_entry(ssl_cert=None):
from autobahn.asyncio.wamp import ApplicationRunner
proto = "wss" if ssl_cert else "ws"
options = None
if ssl_cert:
raise RuntimeError("asyncio backend does not support ssl")
runner = ApplicationRunner(url=u"{0}://127.0.0.1:8181/ws".format(proto),
realm=u"realm1", ssl=options)
try:
runner.run(Component)
    except (KeyboardInterrupt, Exception):
raise KeyboardInterrupt
print "shutting down kogniserver..."
if __name__ == '__main__':
main_entry()
|
mit
| 8,312,911,809,580,319,000 | 26.141176 | 97 | 0.61075 | false | 3.896959 | false | false | false |
jainanisha90/WeVoteServer
|
import_export_facebook/models.py
|
1
|
55791
|
# import_export_facebook/models.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from django.core.validators import RegexValidator
from django.db import models
from email_outbound.models import SEND_STATUS_CHOICES, TO_BE_PROCESSED
from wevote_functions.functions import generate_random_string, positive_value_exists, convert_to_int
from exception.models import handle_exception, print_to_log
import wevote_functions.admin
import facebook
logger = wevote_functions.admin.get_logger(__name__)
FRIEND_INVITATION_FACEBOOK_TEMPLATE = 'FRIEND_INVITATION_FACEBOOK_TEMPLATE'
GENERIC_EMAIL_FACEBOOK_TEMPLATE = 'GENERIC_EMAIL_FACEBOOK_TEMPLATE'
KIND_OF_FACEBOOK_TEMPLATE_CHOICES = (
(GENERIC_EMAIL_FACEBOOK_TEMPLATE, 'Generic Email'),
(FRIEND_INVITATION_FACEBOOK_TEMPLATE, 'Invite Friend'),
)
class FacebookAuthResponse(models.Model):
"""
This is the authResponse data from a Facebook authentication
"""
voter_device_id = models.CharField(
verbose_name="voter_device_id initiating Facebook Auth", max_length=255, null=False, blank=False, unique=True)
datetime_of_authorization = models.DateTimeField(verbose_name='date and time of action', null=False, auto_now=True)
# Comes from Facebook authResponse FACEBOOK_LOGGED_IN
facebook_access_token = models.CharField(
verbose_name="accessToken from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_expires_in = models.IntegerField(verbose_name="expiresIn from Facebook", null=True, blank=True)
facebook_signed_request = models.TextField(verbose_name="signedRequest from Facebook", null=True, blank=True)
facebook_user_id = models.BigIntegerField(verbose_name="facebook big integer id", null=True, blank=True)
# Comes from FACEBOOK_RECEIVED_DATA
facebook_email = models.EmailField(verbose_name='facebook email address', max_length=255, unique=False,
null=True, blank=True)
facebook_first_name = models.CharField(
verbose_name="first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_middle_name = models.CharField(
verbose_name="first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_last_name = models.CharField(
verbose_name="first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
    facebook_profile_image_url_https = models.URLField(
        verbose_name="url of voter's image from facebook", blank=True, null=True)
    facebook_background_image_url_https = models.URLField(
        verbose_name="url of voter's background 'cover' image from facebook "
                     "(like the twitter banner photo)", blank=True, null=True)
facebook_background_image_offset_x = models.IntegerField(verbose_name="x offset of facebook cover image", default=0,
null=True, blank=True)
facebook_background_image_offset_y = models.IntegerField(verbose_name="y offset of facebook cover image", default=0,
null=True, blank=True)
def get_full_name(self):
full_name = self.facebook_first_name if positive_value_exists(self.facebook_first_name) else ''
full_name += " " if positive_value_exists(self.facebook_first_name) \
and positive_value_exists(self.facebook_last_name) else ''
full_name += self.facebook_last_name if positive_value_exists(self.facebook_last_name) else ''
if not positive_value_exists(full_name) and positive_value_exists(self.facebook_email):
            full_name = self.facebook_email.split("@", 1)[0]
return full_name
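# Hedged example (not part of the project): get_full_name() joins the Facebook
# name fields and falls back to the local part of facebook_email otherwise.
def _example_full_name():
    auth = FacebookAuthResponse(facebook_first_name='Jane', facebook_last_name='Doe')
    return auth.get_full_name()  # 'Jane Doe'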
class FacebookLinkToVoter(models.Model):
"""
This is the link between a Facebook account and a We Vote voter account
"""
voter_we_vote_id = models.CharField(verbose_name="we vote id for the email owner", max_length=255, unique=True)
facebook_user_id = models.BigIntegerField(verbose_name="facebook big integer id", null=False, unique=True)
secret_key = models.CharField(
verbose_name="secret key to verify ownership facebook account", max_length=255, null=False, unique=True)
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=False, auto_now=True)
class FacebookMessageOutboundDescription(models.Model):
"""
A description of the Facebook direct message we want to send.
"""
alphanumeric = RegexValidator(r'^[0-9a-zA-Z]*$', message='Only alphanumeric characters are allowed.')
kind_of_send_template = models.CharField(max_length=50, choices=KIND_OF_FACEBOOK_TEMPLATE_CHOICES,
default=GENERIC_EMAIL_FACEBOOK_TEMPLATE)
sender_voter_we_vote_id = models.CharField(
verbose_name="we vote id for the sender", max_length=255, null=True, blank=True, unique=False)
recipient_voter_we_vote_id = models.CharField(
verbose_name="we vote id for the recipient if we have it", max_length=255, null=True, blank=True, unique=False)
recipient_facebook_id = models.BigIntegerField(verbose_name="facebook big integer id", null=True, blank=True)
recipient_facebook_email = models.EmailField(verbose_name='facebook email address', max_length=255, unique=False,
null=True, blank=True)
recipient_fb_username = models.CharField(unique=True, max_length=50, validators=[alphanumeric], null=True)
send_status = models.CharField(max_length=50, choices=SEND_STATUS_CHOICES, default=TO_BE_PROCESSED)
class FacebookUser(models.Model):
"""
My facebook friends details, from the perspective of facebook id of me
"""
facebook_user_id = models.BigIntegerField(verbose_name="facebook id of user", null=False, unique=False)
facebook_user_name = models.CharField(
verbose_name="User name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_first_name = models.CharField(
verbose_name="User's first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_middle_name = models.CharField(
verbose_name="User's middle_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_email = models.EmailField(verbose_name='facebook email address', max_length=255, unique=False,
null=True, blank=True)
facebook_user_location_id = models.BigIntegerField(
verbose_name="location id of Facebook user", null=True, unique=False)
facebook_user_location_name = models.CharField(
verbose_name="User's location name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_gender = models.CharField(
verbose_name="User's gender from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_birthday = models.CharField(
verbose_name="User's birthday from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_last_name = models.CharField(
verbose_name="User's last_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_profile_image_url_https = models.URLField(verbose_name='url of voter image from facebook',
blank=True, null=True)
facebook_background_image_url_https = models.URLField(verbose_name='url of cover image from facebook',
blank=True, null=True)
facebook_background_image_offset_x = models.IntegerField(verbose_name="x offset of facebook cover image", default=0,
null=True, blank=True)
facebook_background_image_offset_y = models.IntegerField(verbose_name="y offset of facebook cover image", default=0,
null=True, blank=True)
we_vote_hosted_profile_image_url_large = models.URLField(verbose_name='we vote hosted large image url',
blank=True, null=True)
we_vote_hosted_profile_image_url_medium = models.URLField(verbose_name='we vote hosted medium image url',
blank=True, null=True)
we_vote_hosted_profile_image_url_tiny = models.URLField(verbose_name='we vote hosted tiny image url',
blank=True, null=True)
facebook_user_about = models.CharField(
verbose_name="User's About from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_is_verified = models.BooleanField(
verbose_name="User is verfired from Facebook", default=False)
facebook_user_friend_total_count = models.BigIntegerField(
verbose_name="total count of friends from facebook", null=True, unique=False)
class FacebookFriendsUsingWeVote(models.Model):
"""
My facebook friends ids who are already using Wvote App, from the perspective of facebook id of me
"""
facebook_id_of_me = models.BigIntegerField(verbose_name="facebook id of viewer", null=False, unique=False)
facebook_id_of_my_friend = models.BigIntegerField(verbose_name="facebook id of my friend", null=False, unique=False)
class FacebookManager(models.Model):
def __unicode__(self):
return "FacebookManager"
def create_facebook_link_to_voter(self, facebook_user_id, voter_we_vote_id):
create_new_facebook_link = True
facebook_link_to_voter = None
facebook_link_to_voter_saved = False
status = ""
success = True
if not positive_value_exists(facebook_user_id) or not positive_value_exists(voter_we_vote_id):
status += "CREATE_FACEBOOK_LINK_MISSING_REQUIRED_VARIABLES "
print_to_log(logger=logger, exception_message_optional=status)
success = False
results = {
'success': success,
'status': status,
'facebook_link_to_voter_saved': facebook_link_to_voter_saved,
'facebook_link_to_voter': facebook_link_to_voter,
}
return results
# Does a link already exist?
try:
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
facebook_user_id=facebook_user_id,
)
success = True
status += "FACEBOOK_LINK_TO_VOTER_ALREADY_EXISTS "
if voter_we_vote_id == facebook_link_to_voter.voter_we_vote_id:
facebook_link_to_voter_saved = True
create_new_facebook_link = False
success = True
else:
# Write over existing voter_we_vote_id
create_new_facebook_link = False
try:
facebook_link_to_voter.voter_we_vote_id = voter_we_vote_id
facebook_link_to_voter.save()
success = True
except Exception as e:
status += "FACEBOOK_LINK_TO_VOTER-UPDATE_VOTER_WE_VOTE_ID_FAILED (" + str(voter_we_vote_id) + ") "
handle_exception(e, logger=logger, exception_message=status)
success = False
        except FacebookLinkToVoter.DoesNotExist:
status += "EXISTING_FACEBOOK_LINK_TO_VOTER_DOES_NOT_EXIST "
except Exception as e:
status += "FACEBOOK_LINK_TO_VOTER-GET_FAILED "
handle_exception(e, logger=logger, exception_message=status)
# Any attempts to save a facebook_link using either facebook_user_id or voter_we_vote_id that already
# exist in the table will fail, since those fields are required to be unique.
if create_new_facebook_link:
facebook_secret_key = generate_random_string(12)
try:
facebook_link_to_voter = FacebookLinkToVoter.objects.create(
facebook_user_id=facebook_user_id,
voter_we_vote_id=voter_we_vote_id,
secret_key=facebook_secret_key,
)
facebook_link_to_voter_saved = True
success = True
status += "FACEBOOK_LINK_TO_VOTER_CREATED "
except Exception as e:
facebook_link_to_voter_saved = False
facebook_link_to_voter = FacebookLinkToVoter()
success = False
status += "FACEBOOK_LINK_TO_VOTER_NOT_CREATED "
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_link_to_voter_saved': facebook_link_to_voter_saved,
'facebook_link_to_voter': facebook_link_to_voter,
}
return results
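    # Illustrative call (hypothetical ids; the method returns a results dict rather than
    # raising, so callers check the 'facebook_link_to_voter_saved' flag):
    #   facebook_manager = FacebookManager()
    #   results = facebook_manager.create_facebook_link_to_voter(10203040, "wv02voter1234")
    #   if results['facebook_link_to_voter_saved']:
    #       facebook_link_to_voter = results['facebook_link_to_voter']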
def update_or_create_facebook_auth_response(
self, voter_device_id, facebook_access_token, facebook_user_id, facebook_expires_in,
facebook_signed_request,
facebook_email, facebook_first_name, facebook_middle_name, facebook_last_name,
facebook_profile_image_url_https, facebook_background_image_url_https,
facebook_background_image_offset_x, facebook_background_image_offset_y):
"""
:param voter_device_id:
:param facebook_access_token:
:param facebook_user_id:
:param facebook_expires_in:
:param facebook_signed_request:
:param facebook_email:
:param facebook_first_name:
:param facebook_middle_name:
:param facebook_last_name:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param facebook_background_image_offset_x:
:param facebook_background_image_offset_y:
:return:
"""
defaults = {
"voter_device_id": voter_device_id,
}
if positive_value_exists(facebook_access_token):
defaults["facebook_access_token"] = facebook_access_token
if positive_value_exists(facebook_user_id):
defaults["facebook_user_id"] = facebook_user_id
if positive_value_exists(facebook_expires_in):
defaults["facebook_expires_in"] = facebook_expires_in
if positive_value_exists(facebook_signed_request):
defaults["facebook_signed_request"] = facebook_signed_request
if positive_value_exists(facebook_email):
defaults["facebook_email"] = facebook_email
if positive_value_exists(facebook_first_name):
defaults["facebook_first_name"] = facebook_first_name
if positive_value_exists(facebook_middle_name):
defaults["facebook_middle_name"] = facebook_middle_name
if positive_value_exists(facebook_last_name):
defaults["facebook_last_name"] = facebook_last_name
if positive_value_exists(facebook_profile_image_url_https):
defaults["facebook_profile_image_url_https"] = facebook_profile_image_url_https
if positive_value_exists(facebook_background_image_url_https):
defaults["facebook_background_image_url_https"] = facebook_background_image_url_https
# A zero value for the offsets can be a valid value. If we received an image, we also received the offsets.
try:
defaults["facebook_background_image_offset_x"] = int(facebook_background_image_offset_x)
except Exception:
defaults["facebook_background_image_offset_x"] = 0
try:
defaults["facebook_background_image_offset_y"] = int(facebook_background_image_offset_y)
except Exception:
defaults["facebook_background_image_offset_y"] = 0
try:
facebook_auth_response, created = FacebookAuthResponse.objects.update_or_create(
voter_device_id__iexact=voter_device_id,
defaults=defaults,
)
facebook_auth_response_saved = True
success = True
status = "FACEBOOK_AUTH_RESPONSE_UPDATED_OR_CREATED"
except Exception as e:
facebook_auth_response_saved = False
facebook_auth_response = FacebookAuthResponse()
success = False
created = False
status = "FACEBOOK_AUTH_RESPONSE_NOT_UPDATED_OR_CREATED"
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_auth_response_saved': facebook_auth_response_saved,
'facebook_auth_response_created': created,
'facebook_auth_response': facebook_auth_response,
}
return results
def create_or_update_facebook_friends_using_we_vote(self, facebook_id_of_me, facebook_id_of_my_friend):
"""
We use this subroutine to create or update FacebookFriendsUsingWeVote table with my friends facebook id.
:param facebook_id_of_me:
:param facebook_id_of_my_friend:
:return:
"""
try:
facebook_friends_using_we_vote, created = FacebookFriendsUsingWeVote.objects.update_or_create(
facebook_id_of_me=facebook_id_of_me,
facebook_id_of_my_friend=facebook_id_of_my_friend,
defaults={
'facebook_id_of_me': facebook_id_of_me,
'facebook_id_of_my_friend': facebook_id_of_my_friend
}
)
facebook_friends_using_we_vote_saved = True
success = True
status = "FACEBOOK_FRIENDS_USING_WE_VOTE_CREATED"
except Exception as e:
facebook_friends_using_we_vote_saved = False
facebook_friends_using_we_vote = FacebookFriendsUsingWeVote()
success = False
status = "FACEBOOK_FRIENDS_USING_WE_VOTE_CREATED"
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_friends_using_we_vote_saved': facebook_friends_using_we_vote_saved,
'facebook_friends_using_we_vote': facebook_friends_using_we_vote,
}
return results
def create_or_update_facebook_user(self, facebook_user_id, facebook_user_first_name, facebook_user_middle_name,
facebook_user_last_name, facebook_user_name=None, facebook_user_location_id=None,
facebook_user_location_name=None, facebook_user_gender=None,
facebook_user_birthday=None, facebook_profile_image_url_https=None,
facebook_background_image_url_https=None, facebook_user_about=None,
facebook_user_is_verified=False, facebook_user_friend_total_count=None,
we_vote_hosted_profile_image_url_large=None,
we_vote_hosted_profile_image_url_medium=None,
we_vote_hosted_profile_image_url_tiny=None,
facebook_email=None):
"""
We use this subroutine to create or update FacebookUser table with my friends details.
:param facebook_user_id:
:param facebook_user_first_name:
:param facebook_user_middle_name:
:param facebook_user_last_name:
:param facebook_user_name:
:param facebook_user_location_id:
:param facebook_user_location_name:
:param facebook_user_gender:
:param facebook_user_birthday:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param facebook_user_about:
:param facebook_user_is_verified:
:param facebook_user_friend_total_count:
:param we_vote_hosted_profile_image_url_large:
:param we_vote_hosted_profile_image_url_medium:
:param we_vote_hosted_profile_image_url_tiny:
:param facebook_email:
:return:
"""
try:
# for facebook_user_entry in facebook_users:
facebook_user, created = FacebookUser.objects.update_or_create(
facebook_user_id=facebook_user_id,
defaults={
'facebook_user_id': facebook_user_id,
'facebook_user_name': facebook_user_name,
'facebook_user_first_name': facebook_user_first_name,
'facebook_user_middle_name': facebook_user_middle_name,
'facebook_user_last_name': facebook_user_last_name,
'facebook_email': facebook_email,
'facebook_user_location_id': facebook_user_location_id,
'facebook_user_location_name': facebook_user_location_name,
'facebook_user_gender': facebook_user_gender,
'facebook_user_birthday': facebook_user_birthday,
'facebook_profile_image_url_https': facebook_profile_image_url_https,
'facebook_background_image_url_https': facebook_background_image_url_https,
'facebook_user_about': facebook_user_about,
'facebook_user_is_verified': facebook_user_is_verified,
'facebook_user_friend_total_count': facebook_user_friend_total_count,
'we_vote_hosted_profile_image_url_large': we_vote_hosted_profile_image_url_large,
'we_vote_hosted_profile_image_url_medium': we_vote_hosted_profile_image_url_medium,
'we_vote_hosted_profile_image_url_tiny': we_vote_hosted_profile_image_url_tiny
}
)
facebook_user_saved = True
success = True
status = " FACEBOOK_USER_CREATED"
except Exception as e:
facebook_user_saved = False
facebook_user = FacebookUser()
success = False
status = " FACEBOOK_USER_NOT_CREATED"
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_user_saved': facebook_user_saved,
'facebook_user': facebook_user,
}
return results
def reset_facebook_user_image_details(self, facebook_user_id, facebook_profile_image_url_https,
facebook_background_image_url_https):
"""
        Reset a facebook user entry to the original image details from facebook, clearing the we vote hosted images.
:param facebook_user_id:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:return:
"""
success = False
status = "ENTERING_RESET_FACEBOOK_USER_IMAGE_DETAILS"
values_changed = False
facebook_user_results = self.retrieve_facebook_user_by_facebook_user_id(facebook_user_id)
facebook_user = facebook_user_results['facebook_user']
if facebook_user_results['facebook_user_found']:
if positive_value_exists(facebook_profile_image_url_https):
facebook_user.facebook_profile_image_url_https = facebook_profile_image_url_https
values_changed = True
if positive_value_exists(facebook_background_image_url_https):
facebook_user.facebook_background_image_url_https = facebook_background_image_url_https
values_changed = True
facebook_user.we_vote_hosted_profile_image_url_large = ''
facebook_user.we_vote_hosted_profile_image_url_medium = ''
facebook_user.we_vote_hosted_profile_image_url_tiny = ''
if values_changed:
facebook_user.save()
success = True
status = "RESET_FACEBOOK_USER_IMAGE_DETAILS"
else:
success = True
status = "NO_CHANGES_RESET_TO_FACEBOOK_USER_IMAGE_DETAILS"
results = {
'success': success,
'status': status,
'facebook_user': facebook_user,
}
return results
def update_facebook_user_details(self, facebook_user,
cached_facebook_profile_image_url_https=False,
cached_facebook_background_image_url_https=False,
we_vote_hosted_profile_image_url_large=False,
we_vote_hosted_profile_image_url_medium=False,
we_vote_hosted_profile_image_url_tiny=False):
"""
        Update a facebook user entry with cached image urls
:param facebook_user:
:param cached_facebook_profile_image_url_https:
:param cached_facebook_background_image_url_https:
:param we_vote_hosted_profile_image_url_large:
:param we_vote_hosted_profile_image_url_medium:
:param we_vote_hosted_profile_image_url_tiny:
:return:
"""
success = False
status = "ENTERING_UPDATE_FACEBOOK_USER_DETAILS"
values_changed = False
if facebook_user:
if positive_value_exists(cached_facebook_profile_image_url_https):
facebook_user.facebook_profile_image_url_https = cached_facebook_profile_image_url_https
values_changed = True
if positive_value_exists(cached_facebook_background_image_url_https):
facebook_user.facebook_background_image_url_https = cached_facebook_background_image_url_https
values_changed = True
if positive_value_exists(we_vote_hosted_profile_image_url_large):
facebook_user.we_vote_hosted_profile_image_url_large = we_vote_hosted_profile_image_url_large
values_changed = True
if positive_value_exists(we_vote_hosted_profile_image_url_medium):
facebook_user.we_vote_hosted_profile_image_url_medium = we_vote_hosted_profile_image_url_medium
values_changed = True
if positive_value_exists(we_vote_hosted_profile_image_url_tiny):
facebook_user.we_vote_hosted_profile_image_url_tiny = we_vote_hosted_profile_image_url_tiny
values_changed = True
if values_changed:
facebook_user.save()
success = True
status = "SAVED_FACEBOOK_USER_DETAILS"
else:
success = True
status = "NO_CHANGES_SAVED_TO_FACBOOK_USER_DETAILS"
results = {
'success': success,
'status': status,
'facebook_user': facebook_user,
}
return results
def retrieve_facebook_auth_response(self, voter_device_id):
"""
:param voter_device_id:
:return:
"""
facebook_auth_response = FacebookAuthResponse()
facebook_auth_response_id = 0
try:
if positive_value_exists(voter_device_id):
facebook_auth_response = FacebookAuthResponse.objects.get(
voter_device_id__iexact=voter_device_id,
)
facebook_auth_response_id = facebook_auth_response.id
facebook_auth_response_found = True
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_FOUND_BY_VOTER_DEVICE_ID "
else:
facebook_auth_response_found = False
success = False
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_VARIABLES_MISSING "
except FacebookAuthResponse.DoesNotExist:
facebook_auth_response_found = False
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_NOT_FOUND "
except Exception as e:
facebook_auth_response_found = False
success = False
status = 'FAILED retrieve_facebook_auth_response '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_auth_response_found': facebook_auth_response_found,
'facebook_auth_response_id': facebook_auth_response_id,
'facebook_auth_response': facebook_auth_response,
}
return results
def retrieve_facebook_auth_response_from_facebook_id(self, facebook_user_id):
"""
Retrieve facebook auth response from facebook user id
:param facebook_user_id:
:return:
"""
facebook_auth_response = FacebookAuthResponse()
facebook_auth_response_id = 0
try:
if positive_value_exists(facebook_user_id):
facebook_auth_response = FacebookAuthResponse.objects.get(
facebook_user_id=facebook_user_id,
)
facebook_auth_response_id = facebook_auth_response.id
facebook_auth_response_found = True
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_FOUND_BY_FACEBOOK_USER_ID "
else:
facebook_auth_response_found = False
success = False
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_VARIABLES_MISSING "
except FacebookAuthResponse.DoesNotExist:
facebook_auth_response_found = False
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_NOT_FOUND "
except Exception as e:
facebook_auth_response_found = False
success = False
status = 'FAILED retrieve_facebook_auth_response'
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_auth_response_found': facebook_auth_response_found,
'facebook_auth_response_id': facebook_auth_response_id,
'facebook_auth_response': facebook_auth_response,
}
return results
def fetch_facebook_id_from_voter_we_vote_id(self, voter_we_vote_id):
facebook_user_id = 0
facebook_results = self.retrieve_facebook_link_to_voter(facebook_user_id, voter_we_vote_id)
if facebook_results['facebook_link_to_voter_found']:
facebook_link_to_voter = facebook_results['facebook_link_to_voter']
facebook_user_id = facebook_link_to_voter.facebook_user_id
return facebook_user_id
def retrieve_facebook_link_to_voter_from_facebook_id(self, facebook_user_id):
return self.retrieve_facebook_link_to_voter(facebook_user_id)
def retrieve_facebook_link_to_voter_from_voter_we_vote_id(self, voter_we_vote_id):
facebook_user_id = 0
facebook_secret_key = ""
return self.retrieve_facebook_link_to_voter(facebook_user_id, voter_we_vote_id, facebook_secret_key)
def retrieve_facebook_link_to_voter_from_facebook_secret_key(self, facebook_secret_key):
facebook_user_id = 0
voter_we_vote_id = ""
return self.retrieve_facebook_link_to_voter(facebook_user_id, voter_we_vote_id, facebook_secret_key)
def retrieve_facebook_link_to_voter(self, facebook_user_id=0, voter_we_vote_id='', facebook_secret_key=''):
"""
:param facebook_user_id:
:param voter_we_vote_id:
:param facebook_secret_key:
:return:
"""
facebook_link_to_voter = FacebookLinkToVoter()
facebook_link_to_voter_id = 0
try:
if positive_value_exists(facebook_user_id):
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
facebook_user_id=facebook_user_id,
)
facebook_link_to_voter_id = facebook_link_to_voter.id
facebook_link_to_voter_found = True
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_FOUND_BY_FACEBOOK_USER_ID "
elif positive_value_exists(voter_we_vote_id):
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
voter_we_vote_id__iexact=voter_we_vote_id,
)
facebook_link_to_voter_id = facebook_link_to_voter.id
facebook_link_to_voter_found = True
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_FOUND_BY_VOTER_WE_VOTE_ID "
elif positive_value_exists(facebook_secret_key):
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
secret_key=facebook_secret_key,
)
facebook_link_to_voter_id = facebook_link_to_voter.id
facebook_link_to_voter_found = True
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_FOUND_BY_FACEBOOK_SECRET_KEY "
else:
facebook_link_to_voter_found = False
success = False
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_VARIABLES_MISSING "
except FacebookLinkToVoter.DoesNotExist:
facebook_link_to_voter_found = False
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_NOT_FOUND"
except Exception as e:
facebook_link_to_voter_found = False
success = False
status = 'FAILED retrieve_facebook_link_to_voter '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_link_to_voter_found': facebook_link_to_voter_found,
'facebook_link_to_voter_id': facebook_link_to_voter_id,
'facebook_link_to_voter': facebook_link_to_voter,
}
return results
def extract_facebook_details_data(self, facebook_friend_api_details_entry):
"""
Extracting facebook friend details with required fields
:param facebook_friend_api_details_entry:
:return:
"""
facebook_friend_dict = {}
facebook_friend_dict['facebook_user_id'] = (facebook_friend_api_details_entry.get('id')
if 'id' in facebook_friend_api_details_entry.keys() else None)
facebook_friend_dict['facebook_user_name'] = (facebook_friend_api_details_entry.get('name')
if 'name' in facebook_friend_api_details_entry.keys() else "")
facebook_friend_dict['facebook_user_first_name'] = (facebook_friend_api_details_entry.get('first_name')
if 'first_name' in facebook_friend_api_details_entry.keys()
else "")
facebook_friend_dict['facebook_user_middle_name'] = (facebook_friend_api_details_entry.get('middle_name')
if 'middle_name' in facebook_friend_api_details_entry.
keys() else "")
facebook_friend_dict['facebook_user_last_name'] = (facebook_friend_api_details_entry.get('last_name')
if 'last_name' in facebook_friend_api_details_entry.keys()
else "")
facebook_friend_dict['facebook_user_location_id'] = (facebook_friend_api_details_entry.get('location').get('id')
if 'location' in facebook_friend_api_details_entry.keys()
and facebook_friend_api_details_entry.
get('location', {}).get('id', {}) else None)
facebook_friend_dict['facebook_user_location_name'] = (facebook_friend_api_details_entry.get('location').get(
'name') if 'location' in facebook_friend_api_details_entry.keys() and facebook_friend_api_details_entry.get(
'location', {}).get('name', {}) else "")
facebook_friend_dict['facebook_user_gender'] = (facebook_friend_api_details_entry.get('gender')
if 'gender' in facebook_friend_api_details_entry.keys() else "")
facebook_friend_dict['facebook_user_birthday'] = (facebook_friend_api_details_entry.get('birthday')
if 'birthday' in facebook_friend_api_details_entry.keys()
else "")
# is_silhouette is true for default image of facebook
facebook_friend_dict['facebook_profile_image_url_https'] = \
(facebook_friend_api_details_entry.get(
'picture').get('data').get('url') if 'picture' in facebook_friend_api_details_entry.keys() and
facebook_friend_api_details_entry.get('picture', {}).get('data', {}).get('url', {}) and
not facebook_friend_api_details_entry.get('picture', {}).get('data', {}).get('is_silhouette', True)
else "")
facebook_friend_dict['facebook_background_image_url_https'] = \
(facebook_friend_api_details_entry.get('cover').get('source')
if 'cover' in facebook_friend_api_details_entry.keys() and
facebook_friend_api_details_entry.get('cover', {}).get('source', {}) else "")
facebook_friend_dict['facebook_user_about'] = (facebook_friend_api_details_entry.get('about')
if 'about' in facebook_friend_api_details_entry.keys() else "")
facebook_friend_dict['facebook_user_is_verified'] = (facebook_friend_api_details_entry.get('is_verified')
if 'is_verified' in facebook_friend_api_details_entry.
keys() else "")
return facebook_friend_dict
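    # Sketch of the mapping this performs (hypothetical Graph API entry; keys that are
    # absent from the entry default to "" or None as coded above):
    #   {'id': '1234', 'name': 'Jane Doe', 'first_name': 'Jane', 'last_name': 'Doe'}
    #   -> {'facebook_user_id': '1234', 'facebook_user_name': 'Jane Doe',
    #       'facebook_user_first_name': 'Jane', 'facebook_user_last_name': 'Doe',
    #       'facebook_user_middle_name': '', 'facebook_user_location_id': None, ...}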
def retrieve_facebook_friends_from_facebook(self, voter_device_id):
"""
This function is for getting facebook friends who are already using WeVote
        NOTE August 2017: The facebook "friends" API call, when called from the server, now only returns the subset
        of your facebook friends who are already on WeVote; it will not show your friends who do not have the
        facebook app on their facebook settings page. It is unclear if this code even works at all. The code that
        does the job is in the WebApp using the "games" api "invitable_friends" call.
        If having problems, see the note in the client-side WebApp FacebookInvitableFriends.jsx
        Technical discussion: https://stackoverflow.com/questions/23417356
        We use this routine to retrieve my facebook friends' details and update the FacebookFriendsUsingWeVote table
:param voter_device_id:
:return: facebook_friends_list
"""
success = False
status = ''
facebook_friends_list_found = False
facebook_friends_list = []
facebook_api_fields = "id, name, first_name, middle_name, last_name, location{id, name}, gender, birthday, " \
"cover{source}, picture.width(200).height(200){url, is_silhouette}, about, is_verified "
auth_response_results = self.retrieve_facebook_auth_response(voter_device_id)
if not auth_response_results['facebook_auth_response_found']:
error_results = {
'status': "FACEBOOK_AUTH_RESPONSE_NOT_FOUND",
'success': success,
'facebook_friends_list_found': facebook_friends_list_found,
'facebook_friends_list': facebook_friends_list,
}
return error_results
facebook_auth_response = auth_response_results['facebook_auth_response']
try:
facebook_graph = facebook.GraphAPI(facebook_auth_response.facebook_access_token, version='2.7')
facebook_friends_api_details = facebook_graph.get_connections(id=facebook_auth_response.facebook_user_id,
connection_name="friends",
fields=facebook_api_fields)
# graph.get_connections returns three dictionary keys i.e. data, paging, summary,
# here data key contains list of friends with the given fields values and paging contains cursors positions
# and summary contains total_count of your friends, for ex:
# {"data": [{"name": "Micheal", "first_name": "Micheal", "id": "16086981492"},
# {"name": "John", "first_name": "John", "id": "1263984"],
# "paging": {"cursors": {"before": "QVFmc0QVBsZAk1KWmNwRVFoRzB1MGFDWlpoa3J0NFR6VTQZD",
# "after": "QVFIUlAzdGplaWV5YTZAmeUNCNzVuRk1iPZAnhUNjltUldoSjR5aWZAxdGJ2UktEUHQzNWpBeHRmcEkZD"}},
# "summary": {'total_count': 10}}
for facebook_friend_api_details_entry in facebook_friends_api_details.get('data', []):
# Extract required details for each facebook friend and then updating FacebookFriendsUsingWeVote table
facebook_friend_dict = self.extract_facebook_details_data(facebook_friend_api_details_entry)
facebook_friend_dict['facebook_user_friend_total_count'] = (
facebook_friend_api_details_entry.get('friends').get('summary').get('total_count')
if facebook_friend_api_details_entry.get('friends', {}).get('summary', {}).get('total_count', {})
else None)
if facebook_friend_dict not in facebook_friends_list:
facebook_friends_list.append(facebook_friend_dict)
facebook_friends_saved_results = self.create_or_update_facebook_friends_using_we_vote(
facebook_auth_response.facebook_user_id, facebook_friend_dict.get('facebook_user_id'))
status += ' ' + facebook_friends_saved_results['status']
if facebook_friends_api_details.get('data', []).__len__() == 0:
logger.debug("retrieve_facebook_friends_from_facebook received zero friends from the API")
success = True
status += " " + "FACEBOOK_FRIENDS_LIST_FOUND"
facebook_friends_list_found = True
except Exception as e:
success = False
status += " " + "FACEBOOK_FRIENDS_LIST_FAILED_WITH_EXCEPTION"
facebook_friends_list_found = False
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_friends_list_found': facebook_friends_list_found,
'facebook_friends_list': facebook_friends_list,
}
return results
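    # Sketch of consuming the results dict (hypothetical voter_device_id value):
    #   facebook_manager = FacebookManager()
    #   results = facebook_manager.retrieve_facebook_friends_from_facebook('voter_device_id_123')
    #   if results['facebook_friends_list_found']:
    #       for facebook_friend_dict in results['facebook_friends_list']:
    #           print(facebook_friend_dict['facebook_user_id'])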
def retrieve_facebook_friends_using_we_vote_list(self, facebook_id_of_me):
"""
        Retrieve my friends' facebook ids from the FacebookFriendsUsingWeVote table.
:param facebook_id_of_me:
:return:
"""
status = ""
facebook_friends_using_we_vote_list = []
if not positive_value_exists(facebook_id_of_me):
success = False
status = 'RETRIEVE_FACEBOOK_FRIENDS_USING_WE_VOTE-MISSING_FACEBOOK_ID '
results = {
'success': success,
'status': status,
'facebook_friends_using_we_vote_list_found': False,
'facebook_friends_using_we_vote_list': [],
}
return results
try:
facebook_friends_using_we_vote_queryset = FacebookFriendsUsingWeVote.objects.all()
facebook_friends_using_we_vote_queryset = facebook_friends_using_we_vote_queryset.filter(
facebook_id_of_me=facebook_id_of_me)
facebook_friends_using_we_vote_list = facebook_friends_using_we_vote_queryset
if len(facebook_friends_using_we_vote_list):
success = True
facebook_friends_using_we_vote_list_found = True
status += ' FACEBOOK_FRIENDS_USING_WE_VOTE_LIST_RETRIEVED '
else:
success = True
facebook_friends_using_we_vote_list_found = False
status += ' NO_FACEBOOK_FRIENDS_USING_WE_VOTE_LIST_RETRIEVED '
except FacebookFriendsUsingWeVote.DoesNotExist:
# No data found. Not a problem.
success = True
facebook_friends_using_we_vote_list_found = False
status += ' NO_FACEBOOK_FRIENDS_USING_WE_VOTE_LIST_RETRIEVED_DoesNotExist '
facebook_friends_using_we_vote_list = []
except Exception as e:
success = False
facebook_friends_using_we_vote_list_found = False
status += ' FAILED retrieve_facebook_friends_using_we_vote_list FacebookFriendsUsingWeVote '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_friends_using_we_vote_list_found': facebook_friends_using_we_vote_list_found,
'facebook_friends_using_we_vote_list': facebook_friends_using_we_vote_list,
}
return results
def extract_facebook_user_details(self, facebook_user_api_details):
"""
Extracting facebook user details with required fields
:param facebook_user_api_details:
:return:
"""
facebook_user_details_dict = {
'facebook_search_found': True
}
facebook_user_details_dict['about'] = (facebook_user_api_details.get('about')
if 'about' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['location'] = ""
if 'location' in facebook_user_api_details.keys():
if 'city' in facebook_user_api_details.get('location'):
facebook_user_details_dict['location'] += facebook_user_api_details.get('location').get('city')
if 'street' in facebook_user_api_details.get('location'):
facebook_user_details_dict['location'] += ", " + facebook_user_api_details.get('location').get('street')
if 'zip' in facebook_user_api_details.get('location'):
facebook_user_details_dict['location'] += ", " + facebook_user_api_details.get('location').get('zip')
photos = (facebook_user_api_details.get('photos').get(
'data') if 'photos' in facebook_user_api_details.keys() and facebook_user_api_details.get(
'photos', {}).get('data', []) else "")
facebook_user_details_dict['photos'] = " ".join([str(photo.get('picture'))
for photo in photos if 'picture' in photo.keys()])
facebook_user_details_dict['bio'] = (facebook_user_api_details.get('bio')
if 'bio' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['general_info'] = (facebook_user_api_details.get('general_info')
if 'general_info' in facebook_user_api_details.
keys() else "")
facebook_user_details_dict['description'] = (facebook_user_api_details.get('description')
if 'description' in facebook_user_api_details.keys()
else "")
facebook_user_details_dict['features'] = (facebook_user_api_details.get('features')
if 'features' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['contact_address'] = (facebook_user_api_details.get('contact_address')
if 'contact_address' in
facebook_user_api_details.keys() else "")
facebook_user_details_dict['emails'] = " ".join(facebook_user_api_details.get('emails')
if 'emails' in facebook_user_api_details.keys() else [])
facebook_user_details_dict['name'] = (facebook_user_api_details.get('name')
if 'name' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['mission'] = (facebook_user_api_details.get('mission')
if 'mission' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['category'] = (facebook_user_api_details.get('category')
if 'category' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['website'] = (facebook_user_api_details.get('website')
if 'website' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['personal_interests'] = (facebook_user_api_details.get('personal_interests')
if 'personal_interests' in
facebook_user_api_details.keys() else "")
facebook_user_details_dict['personal_info'] = (facebook_user_api_details.get('personal_info')
if 'personal_info' in facebook_user_api_details.keys()
else "")
posts = (facebook_user_api_details.get('posts').get(
'data') if 'posts' in facebook_user_api_details.keys() and facebook_user_api_details.get(
'posts', {}).get('data', []) else "")
facebook_user_details_dict['posts'] = " ".join([str(post.get('message'))
for post in posts if 'message' in post.keys()])
return facebook_user_details_dict
def retrieve_facebook_user_details_from_facebook(self, voter_device_id, facebook_user_name):
"""
:param voter_device_id:
:param facebook_user_name:
:return:
"""
success = False
status = ''
facebook_user_details_found = False
facebook_user_details_dict = {
'facebook_search_found': facebook_user_details_found
}
facebook_api_fields = "about, location, photos{picture}, bio, general_info, description, features, " \
"contact_address, emails, posts.limit(10){message}, name, mission, category," \
"website, personal_interests, personal_info"
auth_response_results = self.retrieve_facebook_auth_response(voter_device_id)
if not auth_response_results['facebook_auth_response_found']:
error_results = {
'status': "FACEBOOK_AUTH_RESPONSE_NOT_FOUND",
'success': success,
'facebook_user_details_found': facebook_user_details_found,
'facebook_user_details': facebook_user_details_dict,
}
return error_results
facebook_auth_response = auth_response_results['facebook_auth_response']
try:
facebook_graph = facebook.GraphAPI(facebook_auth_response.facebook_access_token, version='2.7')
facebook_user_api_details = facebook_graph.get_object(id=facebook_user_name,
fields=facebook_api_fields)
facebook_user_details_dict = self.extract_facebook_user_details(facebook_user_api_details)
success = True
status += " " + "FACEBOOK_USER_DETAILS_FOUND"
facebook_user_details_found = True
except Exception as e:
success = False
status += " " + "FACEBOOK_USER_DETAILS_FAILED_WITH_EXCEPTION"
facebook_user_details_found = False
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_user_details_found': facebook_user_details_found,
'facebook_user_details': facebook_user_details_dict,
}
return results
def retrieve_facebook_user_by_facebook_user_id(self, facebook_user_id):
"""
Retrieve facebook user from FacebookUser table.
:param facebook_user_id:
:return:
"""
status = ""
facebook_user = FacebookUser()
try:
facebook_user = FacebookUser.objects.get(
facebook_user_id=facebook_user_id
)
success = True
facebook_user_found = True
status += ' FACEBOOK_USER_RETRIEVED '
except FacebookUser.DoesNotExist:
# No data found. Not a problem.
success = True
facebook_user_found = False
status += ' NO_FACEBOOK_USER_RETRIEVED_DoesNotExist '
except Exception as e:
success = False
facebook_user_found = False
status += ' FAILED retrieve_facebook_user FacebookUser '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_user_found': facebook_user_found,
'facebook_user': facebook_user,
}
return results
def remove_my_facebook_entry_from_suggested_friends_list(self, facebook_suggested_friends_list, facebook_id_of_me):
"""
        The Facebook graph API call for a friend's friends also returns the user's own entry,
        so remove it from the suggested friend list
:param facebook_suggested_friends_list:
:param facebook_id_of_me:
:return:
"""
        # Iterate over a copy so that removing entries does not skip items
        for facebook_user_entry in list(facebook_suggested_friends_list):
if convert_to_int(facebook_user_entry['facebook_user_id']) == facebook_id_of_me:
facebook_suggested_friends_list.remove(facebook_user_entry)
return facebook_suggested_friends_list
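    # For example (hypothetical entries), with facebook_id_of_me = 111:
    #   [{'facebook_user_id': '111', ...}, {'facebook_user_id': '222', ...}]
    #   -> [{'facebook_user_id': '222', ...}]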
|
mit
| -7,461,401,211,381,602,000 | 53.536657 | 120 | 0.582764 | false | 4.082766 | false | false | false |
arthurmensch/modl
|
benchmarks/log.py
|
1
|
2179
|
import time
import numpy as np
from lightning.impl.primal_cd import CDClassifier
from lightning.impl.sag import SAGAClassifier
from sklearn.datasets import fetch_20newsgroups_vectorized
from lightning.classification import SAGClassifier
from sklearn.linear_model import LogisticRegression
bunch = fetch_20newsgroups_vectorized(subset="all")
X = bunch.data
y = bunch.target
y[y >= 1] = 1
alpha = 1e-3
n_samples = X.shape[0]
sag = SAGClassifier(eta='auto',
loss='log',
alpha=alpha,
tol=1e-10,
max_iter=1000,
verbose=1,
random_state=0)
saga = SAGAClassifier(eta='auto',
loss='log',
alpha=alpha,
tol=1e-10,
max_iter=1000,
verbose=1,
random_state=0)
cd_classifier = CDClassifier(loss='log',
alpha=alpha / 2,
C=1 / n_samples,
tol=1e-10,
max_iter=100,
verbose=1,
random_state=0)
sklearn_sag = LogisticRegression(tol=1e-10, max_iter=1000,
verbose=2, random_state=0,
C=1. / (n_samples * alpha),
solver='sag',
penalty='l2',
fit_intercept=False)
classifiers = [{'name': 'Lightning SAG', 'estimator': sag},
{'name': 'Lightning SAGA', 'estimator': saga},
{'name': 'Sklearn SAG', 'estimator': sklearn_sag},
{'name': 'Lightning CD', 'estimator': cd_classifier},
]
for classifier in classifiers:
    print(classifier['name'])
    clf = classifier['estimator']
    start = time.time()
    clf.fit(X, y)
print("Training time", time.time() - start)
print("Accuracy", np.mean(clf.predict(X) == y))
n_nz = np.sum(np.sum(clf.coef_ != 0, axis=0, dtype=bool))
n_nz /= clf.coef_.size
print(clf.coef_)
print('Non-zero', n_nz)
|
bsd-2-clause
| 7,691,973,709,971,517,000 | 33.046875 | 68 | 0.490133 | false | 3.947464 | false | false | false |
chatziko/mopidy-youtube
|
mopidy_youtube/backend.py
|
1
|
5534
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import string
from multiprocessing.pool import ThreadPool
from urlparse import urlparse, parse_qs
import unicodedata
import pafy
from mopidy import backend
from mopidy.models import SearchResult, Track, Album
import pykka
import requests
from mopidy_youtube import logger
yt_api_endpoint = 'https://www.googleapis.com/youtube/v3/'
yt_key = 'AIzaSyAl1Xq9DwdE_KD4AtPaE4EJl3WZe2zCqg4'
session = requests.Session()
def resolve_track(track, stream=False):
logger.debug("Resolving Youtube for track '%s'", track)
if hasattr(track, 'uri'):
return resolve_url(track.comment, stream)
else:
return resolve_url(track.split('.')[-1], stream)
def safe_url(uri):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
safe_uri = unicodedata.normalize(
'NFKD',
unicode(uri)
).encode('ASCII', 'ignore')
return re.sub(
'\s+',
' ',
''.join(c for c in safe_uri if c in valid_chars)
).strip()
def resolve_url(url, stream=False):
try:
video = pafy.new(url)
if not stream:
uri = 'youtube:video/%s.%s' % (
safe_url(video.title), video.videoid
)
else:
uri = video.getbestaudio()
if not uri: # get video url
uri = video.getbest()
logger.debug('%s - %s %s %s' % (
video.title, uri.bitrate, uri.mediatype, uri.extension))
uri = uri.url
if not uri:
return
except Exception as e:
# Video is private or doesn't exist
logger.info(e.message)
return
track = Track(
name=video.title,
comment=video.videoid,
length=video.length * 1000,
album=Album(
name='Youtube',
images=[video.bigthumb, video.bigthumbhd]
),
uri=uri
)
return track
def search_youtube(q):
query = {
'part': 'id',
'maxResults': 15,
'type': 'video',
'q': q,
'key': yt_key
}
result = session.get(yt_api_endpoint+'search', params=query)
data = result.json()
resolve_pool = ThreadPool(processes=16)
playlist = [item['id']['videoId'] for item in data['items']]
playlist = resolve_pool.map(resolve_url, playlist)
resolve_pool.close()
return [item for item in playlist if item]
def resolve_playlist(url):
resolve_pool = ThreadPool(processes=16)
logger.info("Resolving Youtube-Playlist '%s'", url)
playlist = []
page = 'first'
while page:
params = {
'playlistId': url,
'maxResults': 50,
'key': yt_key,
'part': 'contentDetails'
}
if page and page != "first":
logger.debug("Get Youtube-Playlist '%s' page %s", url, page)
params['pageToken'] = page
result = session.get(yt_api_endpoint+'playlistItems', params=params)
data = result.json()
page = data.get('nextPageToken')
for item in data["items"]:
video_id = item['contentDetails']['videoId']
playlist.append(video_id)
playlist = resolve_pool.map(resolve_url, playlist)
resolve_pool.close()
return [item for item in playlist if item]
class YoutubeBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(YoutubeBackend, self).__init__()
self.config = config
self.library = YoutubeLibraryProvider(backend=self)
self.playback = YoutubePlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['youtube', 'yt']
class YoutubeLibraryProvider(backend.LibraryProvider):
def lookup(self, track):
if 'yt:' in track:
track = track.replace('yt:', '')
if 'youtube.com' in track:
url = urlparse(track)
req = parse_qs(url.query)
if 'list' in req:
return resolve_playlist(req.get('list')[0])
else:
return [resolve_url(track)]
else:
return [resolve_url(track)]
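    # Examples of track URIs this resolves (hypothetical video/playlist ids):
    #   'yt:https://www.youtube.com/watch?v=abc123'          -> single resolved Track
    #   'https://www.youtube.com/watch?v=abc123&list=PL456'  -> every Track in the playlist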
def search(self, query=None, uris=None, exact=False):
# TODO Support exact search
if not query:
return
if 'uri' in query:
search_query = ''.join(query['uri'])
url = urlparse(search_query)
if 'youtube.com' in url.netloc:
req = parse_qs(url.query)
if 'list' in req:
return SearchResult(
uri='youtube:search',
tracks=resolve_playlist(req.get('list')[0])
)
else:
logger.info(
"Resolving Youtube for track '%s'", search_query)
return SearchResult(
uri='youtube:search',
tracks=[resolve_url(search_query)]
)
else:
search_query = ' '.join(query.values()[0])
logger.info("Searching Youtube for query '%s'", search_query)
return SearchResult(
uri='youtube:search',
tracks=search_youtube(search_query)
)
class YoutubePlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track = resolve_track(uri, True)
if track is not None:
return track.uri
else:
return None
|
apache-2.0
| 1,411,562,814,534,946,800 | 27.822917 | 76 | 0.555114 | false | 3.922041 | false | false | false |
DucQuang1/py-earth
|
setup.py
|
1
|
3348
|
from setuptools import setup, Extension
import numpy
import sys
import os
sys.path.insert(0, os.path.join('.', 'pyearth'))
from _version import __version__
# Determine whether to use Cython
if '--cythonize' in sys.argv:
cythonize_switch = True
del sys.argv[sys.argv.index('--cythonize')]
else:
cythonize_switch = False
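# Example invocation (assumed usage; any setup command works, since the flag is stripped
# from sys.argv above before setup() runs):
#   python setup.py build_ext --inplace --cythonize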
# Find all includes
local_inc = 'pyearth'
numpy_inc = numpy.get_include()
# Set up the ext_modules for Cython or not, depending
if cythonize_switch:
from Cython.Distutils import build_ext
from Cython.Build import cythonize
ext_modules = cythonize(
[Extension(
"pyearth._util", ["pyearth/_util.pyx"], include_dirs=[numpy_inc]),
Extension(
"pyearth._basis",
["pyearth/_basis.pyx"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._record",
["pyearth/_record.pyx"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._pruning",
["pyearth/_pruning.pyx"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._forward",
["pyearth/_forward.pyx"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._types",
["pyearth/_types.pyx"],
include_dirs=[local_inc,
numpy_inc])
])
else:
ext_modules = [Extension(
"pyearth._util", ["pyearth/_util.c"], include_dirs=[numpy_inc]),
Extension(
"pyearth._basis",
["pyearth/_basis.c"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._record",
["pyearth/_record.c"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._pruning",
["pyearth/_pruning.c"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._forward",
["pyearth/_forward.c"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._types",
["pyearth/_types.c"],
include_dirs=[local_inc,
numpy_inc])
]
# Create a dictionary of arguments for setup
setup_args = {'name': 'py-earth',
'version': __version__,
'author': 'Jason Rudy',
'author_email': '[email protected]',
'packages': ['pyearth', 'pyearth.test',
'pyearth.test.basis', 'pyearth.test.record'],
'license': 'LICENSE.txt',
'description':
'A Python implementation of Jerome Friedman\'s MARS algorithm.',
'long_description': open('README.md', 'r').read(),
'py_modules': ['pyearth.earth', 'pyearth._version'],
'ext_modules': ext_modules,
'classifiers': ['Development Status :: 3 - Alpha'],
'requires': ['numpy', 'scipy'],
'install_requires': ['scikit-learn >= 0.16',
'sphinx_gallery']}
# Add the build_ext command only if cythonizing
if cythonize_switch:
setup_args['cmdclass'] = {'build_ext': build_ext}
# Finally
setup(**setup_args)
|
bsd-3-clause
| -5,795,489,001,214,014,000 | 32.148515 | 78 | 0.506272 | false | 3.995227 | false | false | false |
biddellns/sc2league-server
|
config/urls.py
|
1
|
1043
|
"""sc2league URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from sc2league_server.seasons import views as season_views
router = routers.DefaultRouter()
router.register('seasons', season_views.SeasonViewSet)
urlpatterns = [
url('^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
gpl-3.0
| -8,316,606,483,788,474,000 | 32.645161 | 82 | 0.71908 | false | 3.559727 | false | false | false |
concurrentlabs/laguna
|
1.0/scripts/cachequery/cache_query.py
|
1
|
2571
|
#!/usr/bin/python
import io
import os
import signal
import sys
from subprocess import call
hit = miss = bypass = expired = 0
#########################################################################################
#########################################################################################
def main():
try:
s = '/var/log/nginx/cache.log'
istream = io.open(s, 'r')
except IOError:
print 'ERROR: failed to open %s' % s
exit(-1)
try:
open("/var/run/cache_query.pid","wb").write('%d' % os.getpid())
except IOError:
        print 'ERROR: failed to open /var/run/cache_query.pid'
exit(-1)
signal.signal(signal.SIGINT, reset_stats)
global hit, miss, bypass, expired
istream.seek(0, 2)
banner()
print_stat_line()
while 1:
for s in istream.readlines():
l = s.split(' ')
if l[2] == 'HIT':
hit += 1
print_stat_line()
elif l[2] == 'MISS':
miss += 1
print_stat_line()
elif l[2] == 'BYPASS':
bypass += 1
print_stat_line()
elif l[2] == 'EXPIRED':
expired += 1
print_stat_line()
### ??? ###
# expired = 0
exit(0)
#########################################################################################
# reset stats
#########################################################################################
def reset_stats(sig, stack):
# print "reset_stats fired."
global hit, miss, bypass, expired
hit = miss = bypass = expired = 0
banner()
print_stat_line()
return
#########################################################################################
#########################################################################################
def banner():
call(["clear"])
print '\n Cache Statistics\n ================\n'
return
#########################################################################################
#########################################################################################
def print_stat_line():
global hit, miss, bypass, expired
sys.stdout.write(' HIT: %5d MISS: %5d BYPASS: %5d EXPIRED: %5d\n'
% (hit, miss, bypass, expired))
return
#########################################################################################
#########################################################################################
if __name__ == "__main__":
main()
|
apache-2.0
| -3,529,379,872,526,363,000 | 30.353659 | 89 | 0.318164 | false | 5.111332 | false | false | false |
bank-netforce/netforce
|
netforce_general/netforce_general/models/user_group.py
|
1
|
1497
|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
class Group(Model):
_name = "user.group"
_string = "Group"
_key = ["name"]
_fields = {
"name": fields.Char("Group Name", required=True, search=True),
"users": fields.Many2Many("base.user", "Users"),
"comments": fields.One2Many("message", "related_id", "Comments"),
}
_order = "name"
Group.register()
|
mit
| 9,020,449,533,256,077,000 | 41.771429 | 80 | 0.727455 | false | 4.112637 | false | false | false |
tkchafin/scripts
|
phylip2biNumNex.py
|
1
|
8129
|
#!/usr/bin/python
import re
import sys
import os
import getopt
import operator
import random
def main():
params = parseArgs()
if params.phylip:
#Get sequences as dict of lists
seqs = readPhylip(params.phylip)
#get list of columns and list of samplenames
alen = getSeqLen(seqs)
columns = [[]for i in range(alen)]
names = list()
for key, value in seqs.items():
names.append(key)
for i, nuc in enumerate(value):
columns[i].append(nuc)
#For each column, delete those which are not bi-allelic
dels=list()
for i, col in enumerate(columns):
if not isBiallelic(col):
dels.append(i)
#print(i,"not biallelic:",col)
print("Deleting",len(dels),"non-biallelic columns.")
for col in sorted(dels,reverse=True): #reverse sorted so subsequent deletes aren't thrown off
#print(col,":",columns[col])
del columns[col]
#Then, convert to 012 format
print("Converting to 012 format...")
formatted = [[]for i in range(alen-len(dels))]
for i, col in enumerate(columns):
#print(col)
#print(nucs2numeric(col))
if params.nohet:
formatted[i] = nucs2numericNohet(col)
else:
formatted[i] = nucs2numeric(col)
#sys.exit()
final_data = dict()
for i, samp in enumerate(names):
seqs = list()
for k,nuc in enumerate(formatted):
seqs.append(nuc[i])
final_data[samp] = "".join(seqs)
print("Writing NEXUS output file...")
dict2nexus(params.out, final_data)
else:
print("No input provided.")
sys.exit(1)
#Function takes biallelic list of nucleotides and converts to numeric
#0 = major allele
#1 = minor allele
#2 = het
#? = - or N
def nucs2numeric(nucs):
if isBiallelic(nucs):
#print(nucs)
ret = list()
counts = {"A":0, "G":0, "C":0, "T":0}
#find major allele
for nuc in nucs:
if nuc not in ("-", "N"):
for exp in get_iupac_caseless(nuc):
counts[exp] += 1
#sort dict, to list of tuples (b/c dicts are orderless, can't keep as dict)
sorted_x = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
majA = sorted_x[0][0]
minA = sorted_x[1][0]
het = reverse_iupac(''.join(sorted(set([majA, minA])))) #get het code
#print(majA, minA, het)
for nuc in nucs:
nuc = nuc.upper()
if nuc == majA:
ret.append("0")
elif nuc == minA:
ret.append("1")
elif nuc == het:
ret.append("2")
elif nuc == "-":
ret.append("-")
else:
ret.append("?")
return(ret)
else:
print("Warning: Data is not biallelic:",nucs)
return(None)
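#Worked example of the 012 coding above (hypothetical column of genotypes):
#  nucs2numeric(["A", "A", "R", "G", "-"])
#  major allele = A, minor allele = G, het code = R
#  -> ["0", "0", "2", "1", "-"]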
#Function takes biallelic list of nucleotides and converts to numeric
#0 = major allele
#1 = minor allele
#2: Randomly samples heterozygous sites as 0 or 1
def nucs2numericNohet(nucs):
if isBiallelic(nucs):
#print(nucs)
ret = list()
counts = {"A":0, "G":0, "C":0, "T":0}
#find major allele
for nuc in nucs:
if nuc not in ("-", "N"):
for exp in get_iupac_caseless(nuc):
counts[exp] += 1
#sort dict, to list of tuples (b/c dicts are orderless, can't keep as dict)
sorted_x = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
majA = sorted_x[0][0]
minA = sorted_x[1][0]
het = reverse_iupac(''.join(sorted(set([majA, minA])))) #get het code
#print(majA, minA, het)
for nuc in nucs:
nuc = nuc.upper()
if nuc == majA:
ret.append("0")
elif nuc == minA:
ret.append("1")
elif nuc == het:
ret.append(random.randint(0,1))
elif nuc == "-":
ret.append("-")
else:
ret.append("?")
return(ret)
else:
print("Warning: Data is not biallelic:",nucs)
return(None)
#Function to translate a string of bases to an iupac ambiguity code
def reverse_iupac(char):
char = char.upper()
iupac = {
'A':'A',
'N':'N',
'-':'-',
'C':'C',
'G':'G',
'T':'T',
'AG':'R',
'CT':'Y',
'AC':'M',
'GT':'K',
'AT':'W',
'CG':'S',
'CGT':'B',
'AGT':'D',
'ACT':'H',
'ACG':'V',
'ACGT':'N'
}
return iupac[char]
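#For example, reverse_iupac("AG") returns "R" and reverse_iupac("ACGT") returns "N"
#(the input must be an alphabetically sorted, uppercase string of unique bases)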
#Function takes a list of nucleotides, and returns True if the column is biallelic
#ignores gaps and Ns
#expands uipac codes using a call to external function
def isBiallelic(nucs):
expanded = list()
for nuc in nucs:
if nuc not in ("-", "N"):
for exp in get_iupac_caseless(nuc):
expanded.append(exp)
uniq_sort = sorted(set(expanded))
if len(uniq_sort) != 2:
#print(nucs)
#print(uniq_sort, len(uniq_sort))
return(False)
else:
return(True)
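#For example, isBiallelic(["A", "R", "G", "N", "-"]) is True (only A and G after expansion,
#with N and - ignored), while isBiallelic(["A", "C", "G"]) is False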
#Function to split character to IUPAC codes, assuming diploidy
def get_iupac_caseless(char):
if char.islower():
char = char.upper()
iupac = {
"A" : ["A"],
"G" : ["G"],
"C" : ["C"],
"T" : ["T"],
"N" : ["A", "C", "G", "T"],
"-" : ["-"],
"R" : ["A","G"],
"Y" : ["C","T"],
"S" : ["G","C"],
"W" : ["A","T"],
"K" : ["G","T"],
"M" : ["A","C"],
"B" : ["C","G","T"],
"D" : ["A","G","T"],
"H" : ["A","C","T"],
"V" : ["A","C","G"]
}
ret = iupac[char]
return ret
#Function to read a phylip file. Returns dict (key=sample) of lists (sequences divided by site)
def readPhylip(phy):
if os.path.exists(phy):
with open(phy, 'r') as fh:
try:
num=0
ret = dict()
for line in fh:
line = line.strip()
if not line:
continue
num += 1
if num == 1:
continue
arr = line.split()
ret[arr[0]] = list(arr[1])
return(ret)
except IOError:
print("Could not read file ",fas)
sys.exit(1)
finally:
fh.close()
else:
		raise FileNotFoundError("File %s not found!" % phy)
#Function to write an alignment as DICT to NEXUS
def dict2nexus(nex, aln):
with open(nex, 'w') as fh:
try:
slen = getSeqLen(aln)
header = "#NEXUS\n\nBegin data;\nDimensions ntax=" + str(len(aln)) + " nchar=" + str(slen) + ";\n"
header = header + "Format datatype=dna symbols=\"012\" missing=? gap=-;\nMatrix\n\n"
fh.write(header)
for seq in aln:
sline = str(seq) + " " + aln[seq] + "\n"
fh.write(sline)
last = ";\nEnd;\n"
fh.write(last)
except IOError:
print("Could not read file ",nex)
sys.exit(1)
finally:
fh.close()
#Goes through a dict of sequences and get the alignment length
def getSeqLen(aln):
length = None
for key in aln:
if not length:
length = len(aln[key])
else:
if length != len(aln[key]):
print("getSeqLen: Alignment contains sequences of multiple lengths.")
return(length)
#Object to parse command-line arguments
class parseArgs():
def __init__(self):
#Define options
try:
options, remainder = getopt.getopt(sys.argv[1:], 'p:ho:n', \
["phylip=","phy=","out=","nohet"])
except getopt.GetoptError as err:
print(err)
self.display_help("\nExiting because getopt returned non-zero exit status.")
#Default values for params
#Input params
self.phylip=None
self.out="out.nex"
self.nohet=False
#First pass to see if help menu was called
for o, a in options:
if o in ("-h", "-help", "--help"):
self.display_help("Exiting because help menu was called.")
#Second pass to set all args.
for opt, arg_raw in options:
arg = arg_raw.replace(" ","")
arg = arg.strip()
opt = opt.replace("-","")
#print(opt,arg)
if opt in ('p', 'phylip', 'phy'):
self.phylip = arg
elif opt in ('h', 'help'):
pass
elif opt in ('o','out'):
self.out = arg
elif opt in ('n','nohet'):
self.nohet=True
else:
assert False, "Unhandled option %r"%opt
#Check manditory options are set
if not self.phylip:
self.display_help("Error: Missing required phylip file (-p, --phylip)")
def display_help(self, message=None):
if message is not None:
print ("\n",message)
print ("\nphylip2biNumNex.py\n")
print ("Contact:Tyler K. Chafin, University of Arkansas,[email protected]")
print ("\nUsage: ", sys.argv[0], "-p /path/to/phylip \n")
print ("Description: Converts PHYLIP file to NEXUS file of only bi-allelic markers, coded with 012. As inputs for PhyloNetworks MLE_biMarkers or SNAPP")
print("""
Arguments:
		-p,--phylip	: Path to input phylip file
-o,--out : Output file name <default = out.nex>
-n,--nohet : Randomly sample one allele from all heterozygous sites
-h,--help : Displays help menu
""")
sys.exit()
#Call main function
if __name__ == '__main__':
main()
|
gpl-3.0
| 3,747,132,566,112,011,300 | 23.558912 | 154 | 0.615451 | false | 2.614667 | false | false | false |
MrHarcombe/python-gpiozero
|
gpiozero/pins/data.py
|
1
|
53566
|
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import os
import sys
from textwrap import dedent
from itertools import cycle
from operator import attrgetter
from collections import namedtuple
from ..exc import PinUnknownPi, PinMultiplePins, PinNoPins, PinInvalidPin
# Some useful constants for describing pins
V1_8 = '1V8'
V3_3 = '3V3'
V5 = '5V'
GND = 'GND'
NC = 'NC' # not connected
GPIO0 = 'GPIO0'
GPIO1 = 'GPIO1'
GPIO2 = 'GPIO2'
GPIO3 = 'GPIO3'
GPIO4 = 'GPIO4'
GPIO5 = 'GPIO5'
GPIO6 = 'GPIO6'
GPIO7 = 'GPIO7'
GPIO8 = 'GPIO8'
GPIO9 = 'GPIO9'
GPIO10 = 'GPIO10'
GPIO11 = 'GPIO11'
GPIO12 = 'GPIO12'
GPIO13 = 'GPIO13'
GPIO14 = 'GPIO14'
GPIO15 = 'GPIO15'
GPIO16 = 'GPIO16'
GPIO17 = 'GPIO17'
GPIO18 = 'GPIO18'
GPIO19 = 'GPIO19'
GPIO20 = 'GPIO20'
GPIO21 = 'GPIO21'
GPIO22 = 'GPIO22'
GPIO23 = 'GPIO23'
GPIO24 = 'GPIO24'
GPIO25 = 'GPIO25'
GPIO26 = 'GPIO26'
GPIO27 = 'GPIO27'
GPIO28 = 'GPIO28'
GPIO29 = 'GPIO29'
GPIO30 = 'GPIO30'
GPIO31 = 'GPIO31'
GPIO32 = 'GPIO32'
GPIO33 = 'GPIO33'
GPIO34 = 'GPIO34'
GPIO35 = 'GPIO35'
GPIO36 = 'GPIO36'
GPIO37 = 'GPIO37'
GPIO38 = 'GPIO38'
GPIO39 = 'GPIO39'
GPIO40 = 'GPIO40'
GPIO41 = 'GPIO41'
GPIO42 = 'GPIO42'
GPIO43 = 'GPIO43'
GPIO44 = 'GPIO44'
GPIO45 = 'GPIO45'
# Board layout ASCII art
REV1_BOARD = """\
{style:white on green}+------------------{style:black on white}| |{style:white on green}--{style:on cyan}| |{style:on green}------+{style:reset}
{style:white on green}| {P1:{style} col2}{style:white on green} P1 {style:black on yellow}|C|{style:white on green} {style:on cyan}|A|{style:on green} |{style:reset}
{style:white on green}| {P1:{style} col1}{style:white on green} {style:black on yellow}+-+{style:white on green} {style:on cyan}+-+{style:on green} |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|SoC|{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:bold}Pi Model{style:normal} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:bold}{model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}|C|{style:black on white}+======{style:reset}
{style:white on green}| {style:on black}|S|{style:black on white}| Net{style:reset}
{style:white on green}| {style:on black}|I|{style:black on white}+======{style:reset}
{style:black on white}=pwr{style:on green} {style:on white}|HDMI|{style:white on green} |{style:reset}
{style:white on green}+----------------{style:black on white}| |{style:white on green}----------+{style:reset}"""
REV2_BOARD = """\
{style:white on green}+------------------{style:black on white}| |{style:white on green}--{style:on cyan}| |{style:on green}------+{style:reset}
{style:white on green}| {P1:{style} col2}{style:white on green} P1 {style:black on yellow}|C|{style:white on green} {style:on cyan}|A|{style:on green} |{style:reset}
{style:white on green}| {P1:{style} col1}{style:white on green} {style:black on yellow}+-+{style:white on green} {style:on cyan}+-+{style:on green} |{style:reset}
{style:white on green}| {P5:{style} col1}{style:white on green} |{style:reset}
{style:white on green}| P5 {P5:{style} col2}{style:white on green} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|SoC|{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:bold}Pi Model{style:normal} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:bold}{model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}|C|{style:black on white}+======{style:reset}
{style:white on green}| {style:on black}|S|{style:black on white}| Net{style:reset}
{style:white on green}| {style:on black}|I|{style:black on white}+======{style:reset}
{style:black on white}=pwr{style:on green} {style:on white}|HDMI|{style:white on green} |{style:reset}
{style:white on green}+----------------{style:black on white}| |{style:white on green}----------+{style:reset}"""
A_BOARD = """\
{style:white on green}+------------------{style:black on white}| |{style:white on green}--{style:on cyan}| |{style:on green}------+{style:reset}
{style:white on green}| {P1:{style} col2}{style:white on green} P1 {style:black on yellow}|C|{style:white on green} {style:on cyan}|A|{style:on green} |{style:reset}
{style:white on green}| {P1:{style} col1}{style:white on green} {style:black on yellow}+-+{style:white on green} {style:on cyan}+-+{style:on green} |{style:reset}
{style:white on green}| {P5:{style} col1}{style:white on green} |{style:reset}
{style:white on green}| P5 {P5:{style} col2}{style:white on green} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|SoC|{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:bold}Pi Model{style:normal} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:bold}{model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}|C|{style:on green} |{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} |{style:reset}
{style:black on white}=pwr{style:on green} {style:on white}|HDMI|{style:white on green} |{style:reset}
{style:white on green}+----------------{style:black on white}| |{style:white on green}----------+{style:reset}"""
BPLUS_BOARD = """\
{style:white on green},--------------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 {style:black on white}+===={style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:black on white}+===={style:reset}
{style:white on green}| {style:bold}Pi Model {model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}+----+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:on black}|SoC |{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:on black}| |{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}+----+{style:on green} |{style:reset}
{style:white on green}| {style:on black}|C|{style:on green} {style:black on white}+======{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:black on white}| Net{style:reset}
{style:white on green}| {style:black on white}pwr{style:white on green} {style:black on white}|HDMI|{style:white on green} {style:on black}|I||A|{style:on green} {style:black on white}+======{style:reset}
{style:white on green}`-{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}----{style:on black}|V|{style:on green}-------'{style:reset}"""
APLUS_BOARD = """\
{style:white on green},--------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 |{style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| {style:bold}Pi Model {model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}+----+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:on black}|SoC |{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:on black}| |{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}+----+{style:on green} |{style:reset}
{style:white on green}| {style:on black}|C|{style:on green} |{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} |{style:reset}
{style:white on green}| {style:black on white}pwr{style:white on green} {style:black on white}|HDMI|{style:white on green} {style:on black}|I||A|{style:on green} |{style:reset}
{style:white on green}`-{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}----{style:on black}|V|{style:on green}-'{style:reset}"""
ZERO12_BOARD = """\
{style:white on green},-------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 |{style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} |{style:reset}
{style:black on white}---+{style:white on green} {style:on black}+---+{style:on green} {style:bold}PiZero{style:normal} |{style:reset}
{style:black on white} sd|{style:white on green} {style:on black}|SoC|{style:on green} {style:bold}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:black on white}---+|hdmi|{style:white on green} {style:on black}+---+{style:on green} {style:black on white}usb{style:on green} {style:black on white}pwr{style:white on green} |{style:reset}
{style:white on green}`---{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}-{style:black on white}| |{style:white on green}-'{style:reset}"""
ZERO13_BOARD = """\
{style:white on green}.-------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 |{style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} {style:black on white}|c{style:reset}
{style:black on white}---+{style:white on green} {style:on black}+---+{style:on green} {style:bold}Pi{model:6s}{style:normal}{style:black on white}|s{style:reset}
{style:black on white} sd|{style:white on green} {style:on black}|SoC|{style:on green} {style:bold}V{pcb_revision:3s}{style:normal} {style:black on white}|i{style:reset}
{style:black on white}---+|hdmi|{style:white on green} {style:on black}+---+{style:on green} {style:black on white}usb{style:on green} {style:on white}pwr{style:white on green} |{style:reset}
{style:white on green}`---{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}-{style:black on white}| |{style:white on green}-'{style:reset}"""
CM_BOARD = """\
{style:white on green}+-----------------------------------------------------------------------------------------------------------------------+{style:reset}
{style:white on green}| Raspberry Pi Compute Module |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| You were expecting more detail? Sorry, the Compute Module's a bit hard to do right now! |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}||||||||||||||||||||-||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||{style:reset}"""
# Pin maps for various board revisions and headers
REV1_P1 = {
# pin func pullup pin func pullup
1: (V3_3, False), 2: (V5, False),
3: (GPIO0, True), 4: (V5, False),
5: (GPIO1, True), 6: (GND, False),
7: (GPIO4, False), 8: (GPIO14, False),
9: (GND, False), 10: (GPIO15, False),
11: (GPIO17, False), 12: (GPIO18, False),
13: (GPIO21, False), 14: (GND, False),
15: (GPIO22, False), 16: (GPIO23, False),
17: (V3_3, False), 18: (GPIO24, False),
19: (GPIO10, False), 20: (GND, False),
21: (GPIO9, False), 22: (GPIO25, False),
23: (GPIO11, False), 24: (GPIO8, False),
25: (GND, False), 26: (GPIO7, False),
}
REV2_P1 = {
1: (V3_3, False), 2: (V5, False),
3: (GPIO2, True), 4: (V5, False),
5: (GPIO3, True), 6: (GND, False),
7: (GPIO4, False), 8: (GPIO14, False),
9: (GND, False), 10: (GPIO15, False),
11: (GPIO17, False), 12: (GPIO18, False),
13: (GPIO27, False), 14: (GND, False),
15: (GPIO22, False), 16: (GPIO23, False),
17: (V3_3, False), 18: (GPIO24, False),
19: (GPIO10, False), 20: (GND, False),
21: (GPIO9, False), 22: (GPIO25, False),
23: (GPIO11, False), 24: (GPIO8, False),
25: (GND, False), 26: (GPIO7, False),
}
REV2_P5 = {
1: (V5, False), 2: (V3_3, False),
3: (GPIO28, False), 4: (GPIO29, False),
5: (GPIO30, False), 6: (GPIO31, False),
7: (GND, False), 8: (GND, False),
}
PLUS_J8 = {
1: (V3_3, False), 2: (V5, False),
3: (GPIO2, True), 4: (V5, False),
5: (GPIO3, True), 6: (GND, False),
7: (GPIO4, False), 8: (GPIO14, False),
9: (GND, False), 10: (GPIO15, False),
11: (GPIO17, False), 12: (GPIO18, False),
13: (GPIO27, False), 14: (GND, False),
15: (GPIO22, False), 16: (GPIO23, False),
17: (V3_3, False), 18: (GPIO24, False),
19: (GPIO10, False), 20: (GND, False),
21: (GPIO9, False), 22: (GPIO25, False),
23: (GPIO11, False), 24: (GPIO8, False),
25: (GND, False), 26: (GPIO7, False),
27: (GPIO0, False), 28: (GPIO1, False),
29: (GPIO5, False), 30: (GND, False),
31: (GPIO6, False), 32: (GPIO12, False),
33: (GPIO13, False), 34: (GND, False),
35: (GPIO19, False), 36: (GPIO16, False),
37: (GPIO26, False), 38: (GPIO20, False),
39: (GND, False), 40: (GPIO21, False),
}
CM_SODIMM = {
1: (GND, False), 2: ('EMMC DISABLE N', False),
3: (GPIO0, False), 4: (NC, False),
5: (GPIO1, False), 6: (NC, False),
7: (GND, False), 8: (NC, False),
9: (GPIO2, False), 10: (NC, False),
11: (GPIO3, False), 12: (NC, False),
13: (GND, False), 14: (NC, False),
15: (GPIO4, False), 16: (NC, False),
17: (GPIO5, False), 18: (NC, False),
19: (GND, False), 20: (NC, False),
21: (GPIO6, False), 22: (NC, False),
23: (GPIO7, False), 24: (NC, False),
25: (GND, False), 26: (GND, False),
27: (GPIO8, False), 28: (GPIO28, False),
29: (GPIO9, False), 30: (GPIO29, False),
31: (GND, False), 32: (GND, False),
33: (GPIO10, False), 34: (GPIO30, False),
35: (GPIO11, False), 36: (GPIO31, False),
37: (GND, False), 38: (GND, False),
39: ('GPIO0-27 VREF', False), 40: ('GPIO0-27 VREF', False),
# Gap in SODIMM pins
41: ('GPIO28-45 VREF', False), 42: ('GPIO28-45 VREF', False),
43: (GND, False), 44: (GND, False),
45: (GPIO12, False), 46: (GPIO32, False),
47: (GPIO13, False), 48: (GPIO33, False),
49: (GND, False), 50: (GND, False),
51: (GPIO14, False), 52: (GPIO34, False),
53: (GPIO15, False), 54: (GPIO35, False),
55: (GND, False), 56: (GND, False),
57: (GPIO16, False), 58: (GPIO36, False),
59: (GPIO17, False), 60: (GPIO37, False),
61: (GND, False), 62: (GND, False),
63: (GPIO18, False), 64: (GPIO38, False),
65: (GPIO19, False), 66: (GPIO39, False),
67: (GND, False), 68: (GND, False),
69: (GPIO20, False), 70: (GPIO40, False),
71: (GPIO21, False), 72: (GPIO41, False),
73: (GND, False), 74: (GND, False),
75: (GPIO22, False), 76: (GPIO42, False),
77: (GPIO23, False), 78: (GPIO43, False),
79: (GND, False), 80: (GND, False),
81: (GPIO24, False), 82: (GPIO44, False),
83: (GPIO25, False), 84: (GPIO45, False),
85: (GND, False), 86: (GND, False),
87: (GPIO26, False), 88: ('GPIO46 1V8', False),
89: (GPIO27, False), 90: ('GPIO47 1V8', False),
91: (GND, False), 92: (GND, False),
93: ('DSI0 DN1', False), 94: ('DSI1 DP0', False),
95: ('DSI0 DP1', False), 96: ('DSI1 DN0', False),
97: (GND, False), 98: (GND, False),
99: ('DSI0 DN0', False), 100: ('DSI1 CP', False),
101: ('DSI0 DP0', False), 102: ('DSI1 CN', False),
103: (GND, False), 104: (GND, False),
105: ('DSI0 CN', False), 106: ('DSI1 DP3', False),
107: ('DSI0 CP', False), 108: ('DSI1 DN3', False),
109: (GND, False), 110: (GND, False),
111: ('HDMI CK N', False), 112: ('DSI1 DP2', False),
113: ('HDMI CK P', False), 114: ('DSI1 DN2', False),
115: (GND, False), 116: (GND, False),
117: ('HDMI D0 N', False), 118: ('DSI1 DP1', False),
119: ('HDMI D0 P', False), 120: ('DSI1 DN1', False),
121: (GND, False), 122: (GND, False),
123: ('HDMI D1 N', False), 124: (NC, False),
125: ('HDMI D1 P', False), 126: (NC, False),
127: (GND, False), 128: (NC, False),
129: ('HDMI D2 N', False), 130: (NC, False),
131: ('HDMI D2 P', False), 132: (NC, False),
133: (GND, False), 134: (GND, False),
135: ('CAM1 DP3', False), 136: ('CAM0 DP0', False),
137: ('CAM1 DN3', False), 138: ('CAM0 DN0', False),
139: (GND, False), 140: (GND, False),
141: ('CAM1 DP2', False), 142: ('CAM0 CP', False),
143: ('CAM1 DN2', False), 144: ('CAM0 CN', False),
145: (GND, False), 146: (GND, False),
147: ('CAM1 CP', False), 148: ('CAM0 DP1', False),
149: ('CAM1 CN', False), 150: ('CAM0 DN1', False),
151: (GND, False), 152: (GND, False),
153: ('CAM1 DP1', False), 154: (NC, False),
155: ('CAM1 DN1', False), 156: (NC, False),
157: (GND, False), 158: (NC, False),
159: ('CAM1 DP0', False), 160: (NC, False),
161: ('CAM1 DN0', False), 162: (NC, False),
163: (GND, False), 164: (GND, False),
165: ('USB DP', False), 166: ('TVDAC', False),
167: ('USB DM', False), 168: ('USB OTGID', False),
169: (GND, False), 170: (GND, False),
171: ('HDMI CEC', False), 172: ('VC TRST N', False),
173: ('HDMI SDA', False), 174: ('VC TDI', False),
175: ('HDMI SCL', False), 176: ('VC TMS', False),
177: ('RUN', False), 178: ('VC TDO', False),
179: ('VDD CORE', False), 180: ('VC TCK', False),
181: (GND, False), 182: (GND, False),
183: (V1_8, False), 184: (V1_8, False),
185: (V1_8, False), 186: (V1_8, False),
187: (GND, False), 188: (GND, False),
189: ('VDAC', False), 190: ('VDAC', False),
191: (V3_3, False), 192: (V3_3, False),
193: (V3_3, False), 194: (V3_3, False),
195: (GND, False), 196: (GND, False),
197: ('VBAT', False), 198: ('VBAT', False),
199: ('VBAT', False), 200: ('VBAT', False),
}
CM3_SODIMM = CM_SODIMM.copy()
CM3_SODIMM.update({
4: ('NC / SDX VREF', False),
6: ('NC / SDX VREF', False),
8: (GND, False),
10: ('NC / SDX CLK', False),
12: ('NC / SDX CMD', False),
14: (GND, False),
16: ('NC / SDX D0', False),
18: ('NC / SDX D1', False),
20: (GND, False),
22: ('NC / SDX D2', False),
24: ('NC / SDX D3', False),
88: ('HDMI HPD N 1V8', False),
90: ('EMMC EN N 1V8', False),
})
# The following data is sourced from a combination of the following locations:
#
# http://elinux.org/RPi_HardwareHistory
# http://elinux.org/RPi_Low-level_peripherals
# https://git.drogon.net/?p=wiringPi;a=blob;f=wiringPi/wiringPi.c#l807
PI_REVISIONS = {
# rev model pcb_rev released soc manufacturer ram storage usb eth wifi bt csi dsi headers board
0x2: ('B', '1.0', '2012Q1', 'BCM2835', 'Egoman', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV1_P1}, REV1_BOARD, ),
0x3: ('B', '1.0', '2012Q3', 'BCM2835', 'Egoman', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV1_P1}, REV1_BOARD, ),
0x4: ('B', '2.0', '2012Q3', 'BCM2835', 'Sony', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x5: ('B', '2.0', '2012Q4', 'BCM2835', 'Qisda', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x6: ('B', '2.0', '2012Q4', 'BCM2835', 'Egoman', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x7: ('A', '2.0', '2013Q1', 'BCM2835', 'Egoman', 256, 'SD', 1, 0, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, A_BOARD, ),
0x8: ('A', '2.0', '2013Q1', 'BCM2835', 'Sony', 256, 'SD', 1, 0, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, A_BOARD, ),
0x9: ('A', '2.0', '2013Q1', 'BCM2835', 'Qisda', 256, 'SD', 1, 0, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, A_BOARD, ),
0xd: ('B', '2.0', '2012Q4', 'BCM2835', 'Egoman', 512, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0xe: ('B', '2.0', '2012Q4', 'BCM2835', 'Sony', 512, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0xf: ('B', '2.0', '2012Q4', 'BCM2835', 'Qisda', 512, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x10: ('B+', '1.2', '2014Q3', 'BCM2835', 'Sony', 512, 'MicroSD', 4, 1, False, False, 1, 1, {'J8': PLUS_J8}, BPLUS_BOARD, ),
0x11: ('CM', '1.1', '2014Q2', 'BCM2835', 'Sony', 512, 'eMMC', 1, 0, False, False, 2, 2, {'SODIMM': CM_SODIMM}, CM_BOARD, ),
0x12: ('A+', '1.1', '2014Q4', 'BCM2835', 'Sony', 256, 'MicroSD', 1, 0, False, False, 1, 1, {'J8': PLUS_J8}, APLUS_BOARD, ),
0x13: ('B+', '1.2', '2015Q1', 'BCM2835', 'Egoman', 512, 'MicroSD', 4, 1, False, False, 1, 1, {'J8': PLUS_J8}, BPLUS_BOARD, ),
0x14: ('CM', '1.1', '2014Q2', 'BCM2835', 'Embest', 512, 'eMMC', 1, 0, False, False, 2, 2, {'SODIMM': CM_SODIMM}, CM_BOARD, ),
0x15: ('A+', '1.1', '2014Q4', 'BCM2835', 'Embest', 256, 'MicroSD', 1, 0, False, False, 1, 1, {'J8': PLUS_J8}, APLUS_BOARD, ),
}
# ANSI color codes, for the pretty printers (nothing comprehensive, just enough
# for our purposes)
class Style(object):
def __init__(self, color=None):
self.color = self._term_supports_color() if color is None else bool(color)
self.effects = {
'reset': 0,
'bold': 1,
'normal': 22,
}
self.colors = {
'black': 0,
'red': 1,
'green': 2,
'yellow': 3,
'blue': 4,
'magenta': 5,
'cyan': 6,
'white': 7,
'default': 9,
}
@staticmethod
def _term_supports_color():
try:
stdout_fd = sys.stdout.fileno()
except IOError:
return False
else:
is_a_tty = os.isatty(stdout_fd)
is_windows = sys.platform.startswith('win')
return is_a_tty and not is_windows
@classmethod
def from_style_content(cls, format_spec):
specs = set(format_spec.split())
style = specs & {'mono', 'color'}
content = specs - style
if len(style) > 1:
raise ValueError('cannot specify both mono and color styles')
try:
style = style.pop()
except KeyError:
style = 'color' if cls._term_supports_color() else 'mono'
if len(content) > 1:
raise ValueError('cannot specify more than one content element')
try:
content = content.pop()
except KeyError:
content = 'full'
return cls(style == 'color'), content
def __call__(self, format_spec):
specs = format_spec.split()
codes = []
fore = True
for spec in specs:
if spec == 'on':
fore = False
else:
try:
codes.append(self.effects[spec])
except KeyError:
try:
if fore:
codes.append(30 + self.colors[spec])
else:
codes.append(40 + self.colors[spec])
except KeyError:
raise ValueError('invalid format specification "%s"' % spec)
if self.color:
return '\x1b[%sm' % (';'.join(str(code) for code in codes))
else:
return ''
def __format__(self, format_spec):
if format_spec == '':
return 'color' if self.color else 'mono'
else:
return self(format_spec)
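# Illustrative note (not part of the original module): Style turns a
# space-separated spec such as 'bold red on white' into an ANSI escape
# sequence when color output is enabled, e.g.
#
#   Style(color=True)('bold red on white')  # -> '\x1b[1;31;47m'
#   Style(color=True)('reset')              # -> '\x1b[0m'
#   Style(color=False)('bold red')          # -> '' (monochrome emits nothing)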
class PinInfo(namedtuple('PinInfo', (
'number',
'function',
'pull_up',
'row',
'col',
))):
"""
This class is a :func:`~collections.namedtuple` derivative used to
represent information about a pin present on a GPIO header. The following
attributes are defined:
.. attribute:: number
An integer containing the physical pin number on the header (starting
from 1 in accordance with convention).
.. attribute:: function
A string describing the function of the pin. Some common examples
include "GND" (for pins connecting to ground), "3V3" (for pins which
output 3.3 volts), "GPIO9" (for GPIO9 in the Broadcom numbering
scheme), etc.
.. attribute:: pull_up
A bool indicating whether the pin has a physical pull-up resistor
permanently attached (this is usually ``False`` but GPIO2 and GPIO3
are *usually* ``True``). This is used internally by gpiozero to raise
errors when pull-down is requested on a pin with a physical pull-up
resistor.
.. attribute:: row
An integer indicating on which row the pin is physically located in
the header (1-based)
.. attribute:: col
An integer indicating in which column the pin is physically located
in the header (1-based)
"""
__slots__ = () # workaround python issue #24931
class HeaderInfo(namedtuple('HeaderInfo', (
'name',
'rows',
'columns',
'pins',
))):
"""
This class is a :func:`~collections.namedtuple` derivative used to
represent information about a pin header on a board. The object can be used
in a format string with various custom specifications::
from gpiozero import *
print('{0}'.format(pi_info().headers['J8']))
print('{0:full}'.format(pi_info().headers['J8']))
print('{0:col2}'.format(pi_info().headers['P1']))
print('{0:row1}'.format(pi_info().headers['P1']))
`'color'` and `'mono'` can be prefixed to format specifications to force
the use of `ANSI color codes`_. If neither is specified, ANSI codes will
only be used if stdout is detected to be a tty::
print('{0:color row2}'.format(pi_info().headers['J8'])) # force use of ANSI codes
print('{0:mono row2}'.format(pi_info().headers['P1'])) # force plain ASCII
The following attributes are defined:
.. automethod:: pprint
.. attribute:: name
The name of the header, typically as it appears silk-screened on the
board (e.g. "P1" or "J8").
.. attribute:: rows
The number of rows on the header.
.. attribute:: columns
The number of columns on the header.
.. attribute:: pins
A dictionary mapping physical pin numbers to :class:`PinInfo` tuples.
.. _ANSI color codes: https://en.wikipedia.org/wiki/ANSI_escape_code
"""
__slots__ = () # workaround python issue #24931
def _func_style(self, function, style):
if function == V5:
return style('bold red')
elif function in (V3_3, V1_8):
return style('bold cyan')
elif function in (GND, NC):
return style('bold black')
elif function.startswith('GPIO') and function[4:].isdigit():
return style('bold green')
else:
return style('yellow')
def _format_full(self, style):
Cell = namedtuple('Cell', ('content', 'align', 'style'))
lines = []
for row in range(self.rows):
line = []
for col in range(self.columns):
pin = (row * self.columns) + col + 1
try:
pin = self.pins[pin]
cells = [
Cell(pin.function, '><'[col % 2], self._func_style(pin.function, style)),
Cell('(%d)' % pin.number, '><'[col % 2], ''),
]
if col % 2:
cells = reversed(cells)
line.extend(cells)
except KeyError:
line.append(Cell('', '<', ''))
lines.append(line)
cols = list(zip(*lines))
col_lens = [max(len(cell.content) for cell in col) for col in cols]
lines = [
' '.join(
'{cell.style}{cell.content:{cell.align}{width}s}{style:reset}'.format(
cell=cell, width=width, style=style)
for cell, width, align in zip(line, col_lens, cycle('><')))
for line in lines
]
return '\n'.join(lines)
def _format_pin(self, pin, style):
return ''.join((
style('on black'),
(
' ' if pin is None else
self._func_style(pin.function, style) +
('1' if pin.number == 1 else 'o')
),
style('reset')
))
def _format_row(self, row, style):
if row > self.rows:
raise ValueError('invalid row %d for header %s' % (row, self.name))
start_pin = (row - 1) * self.columns + 1
return ''.join(
self._format_pin(pin, style)
for n in range(start_pin, start_pin + self.columns)
for pin in (self.pins.get(n),)
)
def _format_col(self, col, style):
if col > self.columns:
raise ValueError('invalid col %d for header %s' % (col, self.name))
return ''.join(
self._format_pin(pin, style)
for n in range(col, self.rows * self.columns + 1, self.columns)
for pin in (self.pins.get(n),)
)
def __format__(self, format_spec):
style, content = Style.from_style_content(format_spec)
if content == 'full':
return self._format_full(style)
elif content.startswith('row') and content[3:].isdigit():
return self._format_row(int(content[3:]), style)
elif content.startswith('col') and content[3:].isdigit():
return self._format_col(int(content[3:]), style)
def pprint(self, color=None):
"""
Pretty-print a diagram of the header pins.
        If *color* is ``None`` (the default), the diagram will include ANSI
        color codes if stdout is a color-capable terminal. Otherwise *color*
can be set to ``True`` or ``False`` to force color or monochrome
output.
"""
print('{0:{style} full}'.format(self, style=Style(color)))
class PiBoardInfo(namedtuple('PiBoardInfo', (
'revision',
'model',
'pcb_revision',
'released',
'soc',
'manufacturer',
'memory',
'storage',
'usb',
'ethernet',
'wifi',
'bluetooth',
'csi',
'dsi',
'headers',
'board',
))):
"""
This class is a :func:`~collections.namedtuple` derivative used to
represent information about a particular model of Raspberry Pi. While it is
a tuple, it is strongly recommended that you use the following named
attributes to access the data contained within. The object can be used
in format strings with various custom format specifications::
from gpiozero import *
print('{0}'.format(pi_info()))
print('{0:full}'.format(pi_info()))
print('{0:board}'.format(pi_info()))
print('{0:specs}'.format(pi_info()))
print('{0:headers}'.format(pi_info()))
`'color'` and `'mono'` can be prefixed to format specifications to force
the use of `ANSI color codes`_. If neither is specified, ANSI codes will
only be used if stdout is detected to be a tty::
print('{0:color board}'.format(pi_info())) # force use of ANSI codes
print('{0:mono board}'.format(pi_info())) # force plain ASCII
.. _ANSI color codes: https://en.wikipedia.org/wiki/ANSI_escape_code
.. automethod:: physical_pin
.. automethod:: physical_pins
.. automethod:: pprint
.. automethod:: pulled_up
.. attribute:: revision
A string indicating the revision of the Pi. This is unique to each
revision and can be considered the "key" from which all other
attributes are derived. However, in itself the string is fairly
meaningless.
.. attribute:: model
A string containing the model of the Pi (for example, "B", "B+", "A+",
"2B", "CM" (for the Compute Module), or "Zero").
.. attribute:: pcb_revision
A string containing the PCB revision number which is silk-screened onto
the Pi (on some models).
.. note::
This is primarily useful to distinguish between the model B
revision 1.0 and 2.0 (not to be confused with the model 2B) which
had slightly different pinouts on their 26-pin GPIO headers.
.. attribute:: released
A string containing an approximate release date for this revision of
the Pi (formatted as yyyyQq, e.g. 2012Q1 means the first quarter of
2012).
.. attribute:: soc
A string indicating the SoC (`system on a chip`_) that this revision
of the Pi is based upon.
.. attribute:: manufacturer
A string indicating the name of the manufacturer (usually "Sony" but a
few others exist).
.. attribute:: memory
An integer indicating the amount of memory (in Mb) connected to the
SoC.
.. note::
This can differ substantially from the amount of RAM available
to the operating system as the GPU's memory is shared with the
CPU. When the camera module is activated, at least 128Mb of RAM
is typically reserved for the GPU.
.. attribute:: storage
A string indicating the type of bootable storage used with this
revision of Pi, e.g. "SD", "MicroSD", or "eMMC" (for the Compute
Module).
.. attribute:: usb
An integer indicating how many USB ports are physically present on
this revision of the Pi.
.. note::
This does *not* include the micro-USB port used to power the Pi.
.. attribute:: ethernet
An integer indicating how many Ethernet ports are physically present
on this revision of the Pi.
.. attribute:: wifi
A bool indicating whether this revision of the Pi has wifi built-in.
.. attribute:: bluetooth
A bool indicating whether this revision of the Pi has bluetooth
built-in.
.. attribute:: csi
An integer indicating the number of CSI (camera) ports available on
this revision of the Pi.
.. attribute:: dsi
An integer indicating the number of DSI (display) ports available on
this revision of the Pi.
.. attribute:: headers
A dictionary which maps header labels to :class:`HeaderInfo` tuples.
For example, to obtain information about header P1 you would query
``headers['P1']``. To obtain information about pin 12 on header J8 you
would query ``headers['J8'].pins[12]``.
A rendered version of this data can be obtained by using the
:class:`PiBoardInfo` object in a format string::
from gpiozero import *
print('{0:headers}'.format(pi_info()))
.. attribute:: board
An ASCII art rendition of the board, primarily intended for console
pretty-print usage. A more usefully rendered version of this data can
be obtained by using the :class:`PiBoardInfo` object in a format
string. For example::
from gpiozero import *
print('{0:board}'.format(pi_info()))
.. _system on a chip: https://en.wikipedia.org/wiki/System_on_a_chip
"""
__slots__ = () # workaround python issue #24931
@classmethod
def from_revision(cls, revision):
if revision & 0x800000:
# New-style revision, parse information from bit-pattern:
#
# MSB -----------------------> LSB
# uuuuuuuuFMMMCCCCPPPPTTTTTTTTRRRR
#
# uuuuuuuu - Unused
# F - New flag (1=valid new-style revision, 0=old-style)
# MMM - Memory size (0=256, 1=512, 2=1024)
# CCCC - Manufacturer (0=Sony, 1=Egoman, 2=Embest, 3=Sony Japan)
# PPPP - Processor (0=2835, 1=2836, 2=2837)
# TTTTTTTT - Type (0=A, 1=B, 2=A+, 3=B+, 4=2B, 5=Alpha (??), 6=CM,
# 8=3B, 9=Zero, 10=CM3, 12=Zero W)
# RRRR - Revision (0, 1, 2, etc.)
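            #
            # Worked example (added for illustration): revision 0xa02082,
            # a Pi 3 Model B, decodes with the masks below as
            #   memory       = (0xa02082 & 0x700000) >> 20 = 2 -> 1024MB
            #   manufacturer = (0xa02082 & 0x0f0000) >> 16 = 0 -> Sony
            #   processor    = (0xa02082 & 0x00f000) >> 12 = 2 -> BCM2837
            #   type         = (0xa02082 & 0x000ff0) >> 4  = 8 -> 3B
            #   revision     = (0xa02082 & 0x00000f)       = 2 -> PCB 1.2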
revcode_memory = (revision & 0x700000) >> 20
revcode_manufacturer = (revision & 0xf0000) >> 16
revcode_processor = (revision & 0xf000) >> 12
revcode_type = (revision & 0xff0) >> 4
revcode_revision = (revision & 0x0f)
try:
model = {
0: 'A',
1: 'B',
2: 'A+',
3: 'B+',
4: '2B',
6: 'CM',
8: '3B',
9: 'Zero',
10: 'CM3',
12: 'Zero W',
}.get(revcode_type, '???')
if model in ('A', 'B'):
pcb_revision = {
0: '1.0', # is this right?
1: '1.0',
2: '2.0',
}.get(revcode_revision, 'Unknown')
else:
pcb_revision = '1.%d' % revcode_revision
soc = {
0: 'BCM2835',
1: 'BCM2836',
2: 'BCM2837',
}.get(revcode_processor, 'Unknown')
manufacturer = {
0: 'Sony',
1: 'Egoman',
2: 'Embest',
3: 'Sony Japan',
}.get(revcode_manufacturer, 'Unknown')
memory = {
0: 256,
1: 512,
2: 1024,
}.get(revcode_memory, None)
released = {
'A': '2013Q1',
'B': '2012Q1' if pcb_revision == '1.0' else '2012Q4',
'A+': '2014Q4' if memory == 512 else '2016Q3',
'B+': '2014Q3',
'2B': '2015Q1' if pcb_revision in ('1.0', '1.1') else '2016Q3',
'CM': '2014Q2',
'3B': '2016Q1' if manufacturer in ('Sony', 'Embest') else '2016Q4',
'Zero': '2015Q4' if pcb_revision == '1.2' else '2016Q2',
'CM3': '2017Q1',
'Zero W': '2017Q1',
}.get(model, 'Unknown')
storage = {
'A': 'SD',
'B': 'SD',
'CM': 'eMMC',
'CM3': 'eMMC / off-board',
}.get(model, 'MicroSD')
usb = {
'A': 1,
'A+': 1,
'Zero': 1,
'Zero W': 1,
'B': 2,
'CM': 0,
'CM3': 1,
}.get(model, 4)
ethernet = {
'A': 0,
'A+': 0,
'Zero': 0,
'Zero W': 0,
'CM': 0,
'CM3': 0,
}.get(model, 1)
wifi = {
'3B': True,
'Zero W': True,
}.get(model, False)
bluetooth = {
'3B': True,
'Zero W': True,
}.get(model, False)
csi = {
'Zero': 0 if pcb_revision == '1.0' else 1,
'Zero W': 1,
'CM': 2,
'CM3': 2,
}.get(model, 1)
dsi = {
'Zero': 0,
'Zero W': 0,
}.get(model, csi)
headers = {
'A': {'P1': REV2_P1, 'P5': REV2_P5},
'B': {'P1': REV1_P1} if pcb_revision == '1.0' else {'P1': REV2_P1, 'P5': REV2_P5},
'CM': {'SODIMM': CM_SODIMM},
'CM3': {'SODIMM': CM3_SODIMM},
}.get(model, {'J8': PLUS_J8})
board = {
'A': A_BOARD,
'B': REV1_BOARD if pcb_revision == '1.0' else REV2_BOARD,
'A+': APLUS_BOARD,
'CM': CM_BOARD,
'CM3': CM_BOARD,
'Zero': ZERO12_BOARD if pcb_revision == '1.2' else ZERO13_BOARD,
'Zero W': ZERO13_BOARD,
}.get(model, BPLUS_BOARD)
except KeyError:
raise PinUnknownPi('unable to parse new-style revision "%x"' % revision)
else:
# Old-style revision, use the lookup table
try:
(
model,
pcb_revision,
released,
soc,
manufacturer,
memory,
storage,
usb,
ethernet,
wifi,
bluetooth,
csi,
dsi,
headers,
board,
) = PI_REVISIONS[revision]
except KeyError:
raise PinUnknownPi('unknown old-style revision "%x"' % revision)
headers = {
header: HeaderInfo(name=header, rows=max(header_data) // 2, columns=2, pins={
number: PinInfo(
number=number, function=function, pull_up=pull_up,
row=row + 1, col=col + 1)
for number, (function, pull_up) in header_data.items()
for row, col in (divmod(number, 2),)
})
for header, header_data in headers.items()
}
return cls(
'%04x' % revision,
model,
pcb_revision,
released,
soc,
manufacturer,
memory,
storage,
usb,
ethernet,
wifi,
bluetooth,
csi,
dsi,
headers,
board,
)
def physical_pins(self, function):
"""
Return the physical pins supporting the specified *function* as tuples
of ``(header, pin_number)`` where *header* is a string specifying the
header containing the *pin_number*. Note that the return value is a
:class:`set` which is not indexable. Use :func:`physical_pin` if you
are expecting a single return value.
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9, or "GND" for all the pins
connecting to electrical ground.
"""
return {
(header, pin.number)
for (header, info) in self.headers.items()
for pin in info.pins.values()
if pin.function == function
}
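    # Example (illustrative only): on a 40-pin board, physical_pins('GND')
    # returns a set such as {('J8', 6), ('J8', 9), ('J8', 14), ...}, while
    # physical_pins('GPIO9') returns the single-element set {('J8', 21)}.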
def physical_pin(self, function):
"""
Return the physical pin supporting the specified *function*. If no pins
support the desired *function*, this function raises :exc:`PinNoPins`.
If multiple pins support the desired *function*, :exc:`PinMultiplePins`
will be raised (use :func:`physical_pins` if you expect multiple pins
in the result, such as for electrical ground).
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9.
"""
result = self.physical_pins(function)
if len(result) > 1:
raise PinMultiplePins('multiple pins can be used for %s' % function)
elif result:
return result.pop()
else:
raise PinNoPins('no pins can be used for %s' % function)
def pulled_up(self, function):
"""
Returns a bool indicating whether a physical pull-up is attached to
the pin supporting the specified *function*. Either :exc:`PinNoPins`
or :exc:`PinMultiplePins` may be raised if the function is not
associated with a single pin.
:param str function:
The pin function you wish to determine pull-up for. Usually this is
something like "GPIO9" for Broadcom GPIO pin 9.
"""
try:
header, number = self.physical_pin(function)
except PinNoPins:
return False
else:
return self.headers[header].pins[number].pull_up
def __repr__(self):
return '{cls}({fields})'.format(
cls=self.__class__.__name__,
fields=', '.join(
(
'{name}=...' if name in ('headers', 'board') else
'{name}={value!r}').format(name=name, value=value)
for name, value in zip(self._fields, self)
)
)
def __format__(self, format_spec):
style, content = Style.from_style_content(format_spec)
if content == 'full':
return dedent("""\
{self:{style} board}
{self:{style} specs}
{self:{style} headers}"""
).format(self=self, style=style)
elif content == 'board':
kw = self._asdict()
kw.update({
name: header
for name, header in self.headers.items()
})
return self.board.format(style=style, **kw)
elif content == 'specs':
return dedent("""\
{style:bold}Revision {style:reset}: {revision}
{style:bold}SoC {style:reset}: {soc}
{style:bold}RAM {style:reset}: {memory}Mb
{style:bold}Storage {style:reset}: {storage}
{style:bold}USB ports {style:reset}: {usb} {style:yellow}(excluding power){style:reset}
{style:bold}Ethernet ports {style:reset}: {ethernet}
{style:bold}Wi-fi {style:reset}: {wifi}
{style:bold}Bluetooth {style:reset}: {bluetooth}
{style:bold}Camera ports (CSI) {style:reset}: {csi}
{style:bold}Display ports (DSI){style:reset}: {dsi}"""
).format(style=style, **self._asdict())
elif content == 'headers':
return '\n\n'.join(
dedent("""\
{style:bold}{header.name}{style:reset}:
{header:{style} full}"""
).format(header=header, style=style)
for header in sorted(self.headers.values(), key=attrgetter('name'))
)
def pprint(self, color=None):
"""
Pretty-print a representation of the board along with header diagrams.
If *color* is ``None`` (the default), the diagram will include ANSI
color codes if stdout is a color-capable terminal. Otherwise *color*
can be set to ``True`` or ``False`` to force color or monochrome
output.
"""
print('{0:{style} full}'.format(self, style=Style(color)))
def pi_info(revision=None):
"""
Returns a :class:`PiBoardInfo` instance containing information about a
*revision* of the Raspberry Pi.
:param str revision:
The revision of the Pi to return information about. If this is omitted
or ``None`` (the default), then the library will attempt to determine
the model of Pi it is running on and return information about that.
"""
if revision is None:
# The reason this import is located here is to avoid a circular
# dependency; devices->pins.local->pins.data->devices
from ..devices import Device
result = Device.pin_factory.pi_info
if result is None:
raise PinUnknownPi('The default pin_factory is not attached to a Pi')
else:
return result
else:
if isinstance(revision, bytes):
revision = revision.decode('ascii')
if isinstance(revision, str):
revision = int(revision, base=16)
else:
# be nice to people passing an int (or something numeric anyway)
revision = int(revision)
return PiBoardInfo.from_revision(revision)
|
bsd-3-clause
| -1,324,905,326,528,842,200 | 45.297321 | 212 | 0.501251 | false | 3.428005 | false | false | false |
kerfab/jdic
|
jdic/jdic.py
|
1
|
21228
|
"""
The Jdic module provides the features required to manipulate
JSON objects through a consistent API.
"""
from __future__ import unicode_literals
import json
import hashlib
import importlib
from collections import Sequence, Mapping
import json_delta
import jsonschema
from . import drivers # pylint: disable=unused-import
from . import settings
JSON_ITERABLES = [
Mapping,
Sequence
]
JSON_LEAVES = [
str,
int,
float,
bool,
type(None)
]
class MatchResult(object):
""" Wraps the results of searches and browses within Jdic objects """
# pylint: disable=too-few-public-methods
def __init__(self, **kwargs):
self._obj = {}
for k in kwargs:
setattr(self, k, kwargs[k])
self._obj[k] = kwargs[k]
def __str__(self):
return str(self._obj)
def __iter__(self):
yield from self._obj.__iter__()
def __getitem__(self, item):
return self._obj[item]
class Jdic(object):
"""
The Jdic class provides the useful operations to crawl or manipulate JSON data objects.
Do not instantiate this class directly, use the instantation wrapper function `jdic()` instead
"""
# pylint: disable=too-many-instance-attributes
_attr_whitelist = [
'count',
'index',
'copy',
'fromkeys',
'keys',
'items',
'values'
]
##
# CLASS OPERATORS
##
def __init__(self, iterable, schema=None, serializer=None, driver=None,
_parent=None, _key=None):
""" Instantiates a Generic Jdic object.
iterable: the core data to be contained within a Jdic (usually dict or list)
schema: a JSON schema which may be used for automatic validation of data
serializer: a function which might be used for custom-made data-to-JSON serialization
driver: the class which implements the driver features
_parent: used internally to attach a new Jdic to another. Within a JSON hierarchy all
iterables are Jdic objects.
_key: used internally to indicate under which key (or index) the new Jdic is attached
within its parent.
"""
# pylint: disable=protected-access
self._parent = _parent
self._key = _key
# Load / Inherit driver first
if self._parent is None:
self._driver_name = driver if driver else settings.json_path_driver
self._driver = None
else:
self._driver_name = self._parent._driver_name if driver is None else driver
self._driver = self._parent._driver if driver is None else None
if self._driver is None:
self._driver = importlib.import_module('.'+self._driver_name, 'jdic.drivers').Driver()
# Inherit parent or constructor properties
if self._parent is None:
self._path = self._driver.get_new_path()
self._serializer = serializer
self._depth = 0
else:
self._path = self._driver.add_to_path(self._parent._path, self._key)
self._serializer = self._parent._serializer if serializer is None else serializer
self._depth = self._parent._depth + 1
self._schema = schema
self._cache = {}
# Dereference or cast to strict Json
if isinstance(iterable, Jdic):
iterable = iterable._obj
self._obj = self._serialize_to_jdic(iterable, parent=self)
if self._schema:
self.validate(self._schema)
def __copy__(self):
return self.new()
def __deepcopy__(self, _):
return self.new()
def __delitem__(self, path):
# pylint: disable=protected-access
if self._driver.is_root_path(path):
if isinstance(self._obj, Mapping):
self._obj = {}
else:
self._obj = []
self._flag_modified()
return
if self._driver.is_a_path(path):
parents = self._driver.get_parent(self._obj, path)
else:
parents = [(self, path)]
for parent, key in parents:
del parent._obj[key]
parent._flag_modified()
def __eq__(self, obj):
if isinstance(obj, Jdic):
return self.checksum() == obj.checksum()
elif self._is_iterable(obj):
return self.checksum() == jdic_create(obj).checksum()
return False
    def __getattr__(self, name):
        # Look the attribute up on the wrapped object; only names outside the
        # read-only whitelist are assumed to mutate it and invalidate caches.
        attr = getattr(self._obj, name)
        if name not in self._attr_whitelist:
            self._flag_modified()
        return attr
def __getitem__(self, item):
if self._driver.is_root_path(item):
return self
if self._driver.is_a_path(item):
return self._driver.get_value_at_path(self._obj, item)
if isinstance(self._obj, Mapping):
return self._obj[str(item)]
return self._obj[int(item)]
def __iter__(self):
yield from self._obj.__iter__()
def __len__(self):
return len(self._obj)
def __setitem__(self, path, value):
# pylint: disable=protected-access
if self._driver.is_root_path(path):
if not self._is_iterable(value):
raise ValueError('Cannot reassign object to non iterable "{}"'.format(type(value)))
            self._jdic_reload(value)
            return
if self._driver.is_a_path(path):
parents = self._driver.get_parent(self._obj, path)
else:
parents = [(self, path)]
for parent, key in parents:
if self._is_iterable(value):
value = jdic_create(value, _parent=parent, _key=key)
parent._obj[key] = value
parent._flag_modified()
def __str__(self):
return self.json(sort_keys=settings.json_dump_sort_keys,
indent=settings.json_dump_indent, ensure_ascii=False)
__repr__ = __str__
##
# UNDERLYING FUNCTIONS
##
def _flag_modified(self):
# pylint: disable=protected-access
self._cache = {}
if self._parent is not None:
self._parent._flag_modified()
if self._schema:
self.validate(self._schema)
def _input_serialize(self, obj):
if self._serializer:
obj = self._serializer(obj)
elif callable(settings.serialize_custom_function):
# pylint: disable=not-callable
obj = settings.serialize_custom_function(obj)
if isinstance(obj, float) and settings.serialize_float_to_int and int(obj) == obj:
return int(obj)
if self._is_json_leaf(obj):
return obj
if isinstance(obj, Mapping):
return dict(obj)
elif isinstance(obj, Sequence):
return list(obj)
return str(obj)
def _is_iterable(self, obj):
if self._is_json_leaf(obj):
return False
for itype in JSON_ITERABLES:
if isinstance(obj, itype):
return True
return False
@staticmethod
def _is_json_leaf(obj):
""" True for int, float, str, bool, None """
for ltype in JSON_LEAVES:
if isinstance(obj, ltype):
return True
return False
@staticmethod
def _is_limit_reached(number, limit):
""" Helper function """
if limit is None:
return False
if limit < 0:
return False
        if number >= limit:
            return True
        return False
def _jdic_reload(self, obj):
# pylint: disable=protected-access
if isinstance(obj, Jdic):
obj = obj._obj
self._obj = self._serialize_to_jdic(obj, parent=self)
self._flag_modified()
@staticmethod
def _keys_in(obj, keys, mode):
""" Helper function """
if not isinstance(obj, Mapping):
return False
if mode == "any":
for key in keys:
if key in obj:
return True
return False
elif mode == "all":
for key in keys:
if key not in obj:
return False
return True
raise NotImplementedError(mode)
def _match(self, obj, query):
return self._driver.match(obj, query)
def _merge(self, obj, with_obj, arr_mode="replace"):
# pylint: disable=protected-access
if isinstance(obj, Jdic):
obj = obj._obj
if isinstance(with_obj, Jdic):
with_obj = with_obj._obj
if not self._is_iterable(obj) or not self._is_iterable(with_obj):
raise TypeError('Cannot merge {} with {}'.format(type(obj), type(with_obj)))
unique_t = self._unique_type(obj, with_obj)
if not unique_t:
return with_obj
if unique_t and isinstance(obj, Mapping):
obj = self._merge_dicts(obj, with_obj, arr_mode)
else:
obj = self._merge_arrays(obj, with_obj, arr_mode)
return obj
def _merge_arrays(self, arr, with_arr, mode="replace"):
if mode == "replace":
return with_arr
if mode == "append":
return arr + with_arr
if mode == "new":
for val in with_arr:
if val not in arr:
arr.append(val)
return arr
if mode == "merge":
arr_l = len(arr)
for index, val in enumerate(with_arr):
if index >= arr_l:
arr.append(val)
else:
if self._is_iterable(arr[index]) and self._is_iterable(with_arr[index]):
arr[index] = self._merge(arr[index], with_arr[index], mode)
else:
arr[index] = with_arr[index]
return arr
raise NotImplementedError('Merge array mode "{}" not implemented'.format(mode))
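    # Illustrative behaviour of the array merge modes above (not part of the
    # original source), merging [1, 2] with [2, 3]:
    #   replace -> [2, 3]        (right-hand list wins)
    #   append  -> [1, 2, 2, 3]  (simple concatenation)
    #   new     -> [1, 2, 3]     (only values not already present are added)
    #   merge   -> [2, 3]        (element-wise; leaf values are overwritten)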
def _merge_dicts(self, dic, with_dic, arr_mode):
for k in with_dic:
if k not in dic:
dic[k] = with_dic[k]
else:
if self._is_iterable(dic[k]) and self._is_iterable(with_dic[k]):
dic[k] = self._merge(dic[k], with_dic[k], arr_mode)
else:
dic[k] = with_dic[k]
return dic
def _serialize_to_jdic(self, iterable, parent=None):
if isinstance(iterable, Mapping):
iterable = dict(iterable)
elif isinstance(iterable, Sequence):
iterable = list(iterable)
res = type(iterable)()
for key, val in jdic_enumerate(iterable):
if isinstance(res, dict):
key = str(key)
val = self._input_serialize(val)
if self._is_iterable(val):
val = jdic_create(val, _parent=parent, _key=key)
if isinstance(res, dict):
res[key] = val
else:
res.append(val)
return res
@staticmethod
def _unique_type(*args):
result = None
for val in args:
type_val = type(val)
if not result:
result = type_val
elif result != type_val:
return None
return result
##
# PUBLIC FUNCTIONS
##
def browse(self, sort=False, depth=None, maxdepth=None, _start=True):
"""
Iterates on each JSON entry in a recursive fashion
Arguments:
- sort: bool. If True keys in dicts are alphabetically sorted before values are yielded.
- depth: an integer between 0 and +inf. Results are only yielded at this depth.
- maxdepth: an integer between 0 and +inf. Results won't be yielded past this depth.
"""
# pylint: disable=protected-access
if maxdepth is not None and maxdepth >= 0 and self._depth > maxdepth:
return
if depth is not None and self._depth > depth:
return
parent_path = None if self._parent is None else self._parent._path
if depth is None and _start:
yield MatchResult(parent=self._parent, parent_path=parent_path, key=self._key,
value=self, path=self._path, depth=self._depth)
for key, val in jdic_enumerate(self._obj, sort=sort):
path = self._driver.add_to_path(self._path, key)
if depth is None or depth == self._depth:
yield MatchResult(parent=self, parent_path=self._path, key=key,
value=val, path=path, depth=self._depth)
if isinstance(val, Jdic):
yield from val.browse(sort=sort, depth=depth, maxdepth=maxdepth, _start=False)
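    # Example (illustrative only): browsing jdic_create({'a': [1, 2]}) yields,
    # in order, a MatchResult for the root object, one for key 'a' (value
    # [1, 2]), then one per list element; each result carries parent, key,
    # value, path and depth attributes.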
def checksum(self, algo='sha256'):
""" Returns an ASCII hexadecimal checksum representing the state of the object """
if 'checksum' in self._cache:
return self._cache['checksum']
hash_ = hashlib.new(algo)
hash_.update(type(self._obj).__name__.encode('utf-8'))
for key, val in jdic_enumerate(self._obj, sort=True):
if isinstance(val, Jdic):
data = "{}:{}:{}:{}".format(type(key).__name__, key,
type(val).__name__, val.checksum())
else:
data = "{}:{}:{}:{}".format(type(key).__name__, key,
type(val).__name__, val)
hash_.update(data.encode('utf-8'))
checksum = hash_.hexdigest()
self._cache['checksum'] = checksum
return checksum
def deepness(self):
""" Returns an integer representing how deep the Jdic object is """
if 'deepness' in self._cache:
return self._cache['deepness']
deepness = 0
for val in self.browse():
if isinstance(val.value, Jdic):
depth = val.value.depth()
if depth > deepness:
deepness = depth
self._cache['deepness'] = deepness
return deepness
def depth(self):
""" Returns an integer representing the depth of the current Jdic object """
return self._depth
def diff(self, obj):
""" Returns a delta between this object and obj """
if isinstance(obj, Jdic):
obj = obj.raw()
return json_delta.diff(self.raw(), obj, verbose=False)
def enumerate(self, sort=False):
""" Yields a key, value pair with both Jdic Mappings and Sequences """
yield from jdic_enumerate(self._obj, sort=sort)
def find(self, value, limit=None, sort=False, depth=None, maxdepth=None):
""" Finds a value within the Jdic object, the search is recursive """
# pylint: disable=too-many-arguments
if limit == 0:
return
num = 0
for res in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if res.value == value:
yield res
num += 1
if self._is_limit_reached(num, limit):
return
def find_keys(self, keys, mode="any", sort=False,
limit=None, depth=None, maxdepth=None):
""" Find one or multiple keys within the Jdic object """
# pylint: disable=too-many-arguments
if limit is not None and limit == 0:
return
if not isinstance(keys, list):
keys = [keys]
num = 0
for match in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if isinstance(match.value, Jdic):
if self._keys_in(match.value, keys, mode):
yield match
num += 1
if limit is not None and limit == num:
return
def find_match(self, query, sort=False, limit=None, depth=None, maxdepth=None):
""" Find inner data which match the provided query """
# pylint: disable=too-many-arguments
        if limit == 0 or maxdepth == 0:
return
num = 0
for res in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if self._match(res.value, query):
yield res
num += 1
if self._is_limit_reached(num, limit):
break
def json(self, sort_keys=False, indent=0, ensure_ascii=False):
""" Returns a string of the object in JSON format """
return json.dumps(self.raw(), sort_keys=sort_keys,
indent=indent, ensure_ascii=ensure_ascii)
def leaves(self, sort=False, depth=None, maxdepth=None):
""" Iterates recursively, raises leaves of the object only """
for res in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if self._is_json_leaf(res.value):
yield res
def nb_leaves(self):
""" Return an integer, the number of leaves within the Jdic object """
if 'nb_leaves' in self._cache:
return self._cache['nb_leaves']
nb_leaves = 0
for _ in self.leaves():
nb_leaves += 1
self._cache['nb_leaves'] = nb_leaves
return nb_leaves
def match(self, query):
""" Returns True if the object matches against query, False otherwise """
return self._match(self._obj, query)
def merge(self, objs, arr_mode="replace"):
""" Make a deep merge of the current Jdic object with one or more objects """
if not isinstance(objs, list):
objs = [objs]
for with_obj in objs:
if (isinstance(with_obj, Mapping) and not isinstance(self._obj, Mapping)) or\
(not isinstance(with_obj, Mapping) and isinstance(self._obj, Mapping)):
raise TypeError('Cannot merge "{}" with "{}"'.format(
type(self._obj),
type(with_obj)))
result = self._merge(self._obj, with_obj, arr_mode)
self._jdic_reload(result)
return self
def new(self, _obj=None):
""" Returns a copy of the current object """
if _obj is None:
_obj = self._obj
return jdic_create(_obj, serializer=self._serializer,
driver=self._driver_name, schema=self._schema)
def parent(self, generation=1):
""" Returns the Jdic object parent of this object """
# pylint: disable=protected-access
if generation < 1:
return None
res = self._parent
while generation > 1 and res is not None:
res = res._parent
generation = generation - 1
return res
def patch(self, diff):
""" Takes a delta (from diff()) and applies it to update the object """
if not diff:
return
res = json_delta.patch(self.raw(), diff)
if self._is_iterable(res):
return self.new(res)
return res
def path(self):
""" Return the path of the current Jdic object within its hierarchy """
return self._path
def raw(self, _obj=None, _cache=False):
""" Returns a copy of the current object in basic Python types """
if _cache and 'raw' in self._cache:
return self._cache['raw']
obj = _obj if _obj else self._obj
res = type(obj)()
for key, val in jdic_enumerate(obj):
if isinstance(val, Jdic):
val = val.raw(_cache=_cache)
if isinstance(res, dict):
res[key] = val
else:
res.append(val)
self._cache['raw'] = res
return res
def validate(self, schema=None):
""" Validates the current Jdic object against a JSON schema """
if schema is not None:
return jsonschema.validate(self.raw(), schema)
elif schema is None and self._schema is not None:
return jsonschema.validate(self.raw(), self._schema)
raise ValueError('The current object is not supervised by any schema')
class JdicSequence(Jdic, Sequence):
""" A wrapper for Jdics with Sequence root types (usually list) """
class JdicMapping(Jdic, Mapping):
""" A wrapper for Jdics with Mapping root types (usually dict) """
def jdic_create(iterable, **kwargs):
""" This function returns a Jdic correctly typped according to the data root type """
if isinstance(iterable, Mapping):
return JdicMapping(iterable, **kwargs)
elif isinstance(iterable, Sequence):
return JdicSequence(iterable, **kwargs)
else:
raise ValueError('Cannot create Jdic object from "{}"'.format(type(iterable)))
def jdic_enumerate(obj, sort=False):
""" Will enumerate dicts and list in a similar fashion, to ease iterables browsing """
if isinstance(obj, Mapping):
try:
keys = sorted(obj.keys()) if sort else obj
except TypeError:
keys = sorted(dict(obj).keys()) if sort else obj
for k in keys:
yield (k, obj[k])
elif isinstance(obj, Sequence):
ind = 0
for val in obj:
yield (ind, val)
ind += 1
else:
raise TypeError('Cannot enumerate objects of type "{}"'.format(type(obj)))
|
unlicense
| 7,458,643,821,538,832,000 | 34.858108 | 99 | 0.554127 | false | 4.239664 | false | false | false |
rombie/contrail-controller
|
src/config/common/tests/test_common.py
|
1
|
42367
|
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import gevent.monkey
gevent.monkey.patch_all()
import logging
import tempfile
import mock
from pprint import pformat
import coverage
import fixtures
import testtools
from testtools import content
from flexmock import flexmock
from webtest import TestApp
import contextlib
from netaddr import IPNetwork, IPAddress
from vnc_api.vnc_api import *
import kombu
import cfgm_common.zkclient
from cfgm_common.uve.vnc_api.ttypes import VncApiConfigLog
from cfgm_common import vnc_cgitb
from cfgm_common.utils import cgitb_hook
from test_utils import *
import bottle
bottle.catchall=False
import inspect
import novaclient
import novaclient.client
import gevent.pywsgi
import uuid
from pysandesh import sandesh_logger
def lineno():
"""Returns the current line number in our program."""
return inspect.currentframe().f_back.f_lineno
# end lineno
try:
import vnc_cfg_api_server
if not hasattr(vnc_cfg_api_server, 'main'):
from vnc_cfg_api_server import vnc_cfg_api_server
except ImportError:
vnc_cfg_api_server = 'vnc_cfg_api_server could not be imported'
try:
import to_bgp
except ImportError:
try:
from schema_transformer import to_bgp
except ImportError:
to_bgp = 'to_bgp could not be imported'
try:
import svc_monitor
if not hasattr(svc_monitor, 'main'):
from svc_monitor import svc_monitor
except ImportError:
svc_monitor = 'svc_monitor could not be imported'
try:
import device_manager
if hasattr(device_manager, 'DeviceManager'):
import dm_server
else:
from device_manager import dm_server
from device_manager import device_manager
except ImportError:
device_manager = 'device_manager could not be imported'
try:
from kube_manager import kube_manager
if not hasattr(kube_manager, 'main'):
from kube_manager import kube_manager
except ImportError:
kube_manager = 'kube_manager could not be imported'
try:
from mesos_manager import mesos_manager
if not hasattr(mesos_manager, 'main'):
from mesos_manager import mesos_manager
except ImportError:
mesos_manager = 'mesos_manager could not be imported'
def generate_conf_file_contents(conf_sections):
cfg_parser = ConfigParser.RawConfigParser()
for (section, var, val) in conf_sections:
try:
cfg_parser.add_section(section)
except ConfigParser.DuplicateSectionError:
pass
if not var:
continue
if val == '':
cfg_parser.set(section, var, 'empty')
else:
cfg_parser.set(section, var, val)
return cfg_parser
# end generate_conf_file_contents
def generate_logconf_file_contents():
cfg_parser = ConfigParser.RawConfigParser()
cfg_parser.add_section('formatters')
cfg_parser.add_section('formatter_simple')
cfg_parser.set('formatters', 'keys', 'simple')
cfg_parser.set('formatter_simple', 'format', '%(name)s:%(levelname)s: %(message)s')
cfg_parser.add_section('handlers')
cfg_parser.add_section('handler_console')
cfg_parser.add_section('handler_api_server_file')
cfg_parser.set('handlers', 'keys', 'console,api_server_file')
cfg_parser.set('handler_console', 'class', 'StreamHandler')
cfg_parser.set('handler_console', 'level', 'WARN')
cfg_parser.set('handler_console', 'args', '[]')
cfg_parser.set('handler_console', 'formatter', 'simple')
cfg_parser.set('handler_api_server_file', 'class', 'FileHandler')
cfg_parser.set('handler_api_server_file', 'level', 'INFO')
cfg_parser.set('handler_api_server_file', 'formatter', 'simple')
cfg_parser.set('handler_api_server_file', 'args', "('api_server.log',)")
cfg_parser.add_section('loggers')
cfg_parser.add_section('logger_root')
cfg_parser.add_section('logger_FakeWSGIHandler')
cfg_parser.set('loggers', 'keys', 'root,FakeWSGIHandler')
cfg_parser.set('logger_root', 'level', 'WARN')
cfg_parser.set('logger_root', 'handlers', 'console')
cfg_parser.set('logger_FakeWSGIHandler', 'level', 'INFO')
cfg_parser.set('logger_FakeWSGIHandler', 'qualname', 'FakeWSGIHandler')
cfg_parser.set('logger_FakeWSGIHandler', 'handlers', 'api_server_file')
return cfg_parser
# end generate_logconf_file_contents
def launch_kube_manager(test_id, conf_sections, kube_api_skip, event_queue,
vnc_kubernetes_config_dict=None):
args_str = ""
vnc_cgitb.enable(format='text')
wait_for_kube_manager_down()
with tempfile.NamedTemporaryFile() as conf, tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str= ["-c", conf.name]
kube_manager.main(args_str, kube_api_skip=kube_api_skip,
event_queue=event_queue,
vnc_kubernetes_config_dict=vnc_kubernetes_config_dict)
#end launch_kube_manager
def launch_mesos_manager(test_id, conf_sections, mesos_api_skip, event_queue):
args_str = ""
vnc_cgitb.enable(format='text')
wait_for_mesos_manager_down()
with tempfile.NamedTemporaryFile() as conf, tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str= ["-c", conf.name]
mesos_manager.main(args_str, mesos_api_skip=mesos_api_skip, event_queue=event_queue)
#end launch_mesos_manager
def retry_exc_handler(tries_remaining, exception, delay):
print >> sys.stderr, "Caught '%s', %d tries remaining, sleeping for %s seconds" % (exception, tries_remaining, delay)
# end retry_exc_handler
def retries(max_tries, delay=1, backoff=2, exceptions=(Exception,), hook=None):
def dec(func):
def f2(*args, **kwargs):
mydelay = delay
tries = range(max_tries)
tries.reverse()
for tries_remaining in tries:
try:
return func(*args, **kwargs)
except exceptions as e:
if tries_remaining > 0:
if hook is not None:
hook(tries_remaining, e, mydelay)
gevent.sleep(mydelay)
mydelay = mydelay * backoff
else:
raise
else:
break
return f2
return dec
# end retries
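# Hedged usage sketch (hypothetical call, not from the original file): retry a flaky
# call up to 5 times with exponential backoff, reporting each failure through
# retry_exc_handler. `fetch_config` and `api_conn` are made-up names for illustration.
#
#     @retries(5, delay=1, backoff=2, hook=retry_exc_handler)
#     def fetch_config():
#         return api_conn.config_read()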
class VncTestApp(TestApp):
def post_json(self, *args, **kwargs):
resp = super(VncTestApp, self).post_json(*args, **kwargs)
resp.charset = 'UTF-8'
return resp
#end class VncTestApp
def create_api_server_instance(test_id, config_knobs, db='cassandra'):
ret_server_info = {}
allocated_sockets = []
ret_server_info['ip'] = socket.gethostbyname(socket.gethostname())
ret_server_info['service_port'] = get_free_port(allocated_sockets)
ret_server_info['introspect_port'] = get_free_port(allocated_sockets)
ret_server_info['admin_port'] = get_free_port(allocated_sockets)
ret_server_info['allocated_sockets'] = allocated_sockets
if db == "cassandra":
ret_server_info['greenlet'] = gevent.spawn(launch_api_server,
test_id, ret_server_info['ip'], ret_server_info['service_port'],
ret_server_info['introspect_port'], ret_server_info['admin_port'],
config_knobs)
else:
msg = ("Contrail API server does not support database backend "
"'%s'" % db)
raise NotImplementedError(msg)
block_till_port_listened(ret_server_info['ip'],
ret_server_info['service_port'])
extra_env = {'HTTP_HOST': ret_server_info['ip'],
'SERVER_PORT': str(ret_server_info['service_port'])}
api_server_obj = ret_server_info['greenlet'].api_server
ret_server_info['app'] = VncTestApp(api_server_obj.api_bottle,
extra_environ=extra_env)
ret_server_info['api_conn'] = VncApi('u', 'p',
api_server_host=ret_server_info['ip'],
api_server_port=ret_server_info['service_port'])
if FakeNovaClient.vnc_lib is None:
FakeNovaClient.vnc_lib = ret_server_info['api_conn']
ret_server_info['api_session'] = requests.Session()
adapter = requests.adapters.HTTPAdapter()
ret_server_info['api_session'].mount("http://", adapter)
ret_server_info['api_session'].mount("https://", adapter)
ret_server_info['api_server'] = api_server_obj
ret_server_info['api_server']._sandesh.set_logging_level(level="SYS_DEBUG")
return ret_server_info
# end create_api_server_instance
def destroy_api_server_instance(server_info):
server_info['greenlet'].kill()
if hasattr(server_info['api_server']._db_conn, '_msgbus'):
server_info['api_server']._db_conn._msgbus.shutdown()
vhost_url = server_info['api_server']._db_conn._msgbus._urls
FakeKombu.reset(vhost_url)
FakeNovaClient.reset()
CassandraCFs.reset()
FakeKazooClient.reset()
FakeExtensionManager.reset()
for sock in server_info['allocated_sockets']:
sock.close()
# end destroy_api_server_instance
def destroy_api_server_instance_issu(server_info):
server_info['greenlet'].kill()
server_info['api_server']._db_conn._msgbus.shutdown()
vhost_url = server_info['api_server']._db_conn._msgbus._urls
for sock in server_info['allocated_sockets']:
sock.close()
# end destroy_api_server_instance
def launch_api_server(test_id, listen_ip, listen_port, http_server_port,
admin_port, conf_sections):
kombu_mock = mock.Mock()
kombu_patch = mock.patch(
'vnc_cfg_api_server.vnc_cfg_api_server.KombuAmqpClient')
kombu_init_mock = kombu_patch.start()
kombu_init_mock.side_effect = kombu_mock
args_str = ""
args_str = args_str + "--listen_ip_addr %s " % (listen_ip)
args_str = args_str + "--listen_port %s " % (listen_port)
args_str = args_str + "--http_server_port %s " % (http_server_port)
args_str = args_str + "--admin_port %s " % (admin_port)
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file api_server_%s.log " %(test_id)
args_str = args_str + "--cluster_id %s " %(test_id)
vnc_cgitb.enable(format='text')
with tempfile.NamedTemporaryFile() as conf, \
tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str = args_str + "--conf_file %s " %(conf.name)
args_str = args_str + "--logging_conf %s " %(logconf.name)
server = vnc_cfg_api_server.VncApiServer(args_str)
gevent.getcurrent().api_server = server
vnc_cfg_api_server.main(args_str, server)
# end launch_api_server
def launch_svc_monitor(cluster_id, test_id, api_server_ip, api_server_port, **extra_args):
allocated_sockets = []
args_str = ""
args_str += "--cluster_id %s " % (cluster_id)
args_str += "--api_server_ip %s " % (api_server_ip)
args_str += "--api_server_port %s " % (api_server_port)
args_str += "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str += "--cassandra_server_list 0.0.0.0:9160 "
args_str += "--log_local "
args_str += "--log_file svc_monitor_%s.log " %(test_id)
args_str += "--trace_file svc_monitor_%s.err " %(test_id)
args_str += "--check_service_interval 2 "
for name, value in extra_args.items():
args_str += "--{name} {value} ".format(name=name, value=value)
svc_monitor.main(args_str)
# end launch_svc_monitor
def kill_svc_monitor(glet):
glet.kill()
svc_monitor.SvcMonitor.reset()
def kill_schema_transformer(glet):
glet.kill()
to_bgp.SchemaTransformer.destroy_instance()
def kill_device_manager(glet):
glet.kill()
dm_server.sigterm_handler()
def kill_kube_manager(glet):
glet.kill()
kube_manager.KubeNetworkManager.destroy_instance()
def kill_mesos_manager(glet):
glet.kill()
mesos_manager.MesosNetworkManager.destroy_instance()
def reinit_schema_transformer():
for obj_cls in to_bgp.DBBaseST.get_obj_type_map().values():
obj_cls.reset()
to_bgp.transformer.reinit()
def launch_schema_transformer(cluster_id, test_id, api_server_ip,
api_server_port, extra_args=None):
allocated_sockets = []
wait_for_schema_transformer_down()
args_str = ""
args_str = args_str + "--cluster_id %s " % (cluster_id)
args_str = args_str + "--api_server_ip %s " % (api_server_ip)
args_str = args_str + "--api_server_port %s " % (api_server_port)
args_str = args_str + "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file schema_transformer_%s.log " %(test_id)
args_str = args_str + "--trace_file schema_transformer_%s.err " %(test_id)
if extra_args:
args_str = args_str + (extra_args)
to_bgp.main(args_str)
# end launch_schema_transformer
def launch_device_manager(test_id, api_server_ip, api_server_port,
conf_sections=None):
kombu_mock = mock.Mock()
kombu_patch = mock.patch(
'device_manager.dm_server.KombuAmqpClient')
kombu_init_mock = kombu_patch.start()
kombu_init_mock.side_effect = kombu_mock
wait_for_device_manager_down()
allocated_sockets = []
args_str = ""
args_str = args_str + "--cluster_id %s " % (test_id)
args_str = args_str + "--api_server_ip %s " % (api_server_ip)
args_str = args_str + "--api_server_port %s " % (api_server_port)
args_str = args_str + "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file device_manager_%s.log " %(test_id)
if conf_sections is not None:
with tempfile.NamedTemporaryFile() as conf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
args_str = args_str + "--conf_file %s " % conf.name
dm_server.main(args_str)
else:
dm_server.main(args_str)
# end launch_device_manager
@retries(5, hook=retry_exc_handler)
def wait_for_schema_transformer_up():
if not to_bgp.SchemaTransformer.get_instance():
raise Exception("ST instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_schema_transformer_down():
if to_bgp.SchemaTransformer.get_instance():
raise Exception("ST instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_device_manager_up():
if not device_manager.DeviceManager.get_instance():
raise Exception("DM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_device_manager_down():
if device_manager.DeviceManager.get_instance():
raise Exception("DM instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_kube_manager_up():
if not kube_manager.KubeNetworkManager.get_instance():
raise Exception("KM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_kube_manager_down():
if kube_manager.KubeNetworkManager.get_instance():
raise Exception("KM instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_mesos_manager_up():
if not mesos_manager.MesosNetworkManager.get_instance():
raise Exception("MM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_mesos_manager_down():
if mesos_manager.MesosNetworkManager.get_instance():
raise Exception("MM instance is up, no new instances allowed")
@contextlib.contextmanager
def flexmocks(mocks):
orig_values = {}
try:
for cls, method_name, val in mocks:
kwargs = {method_name: val}
# save orig cls.method_name
orig_values[(cls, method_name)] = getattr(cls, method_name)
flexmock(cls, **kwargs)
yield
finally:
for (cls, method_name), method in orig_values.items():
setattr(cls, method_name, method)
# end flexmocks
def setup_extra_flexmock(mocks):
for (cls, method_name, val) in mocks:
kwargs = {method_name: val}
flexmock(cls, **kwargs)
# end setup_extra_flexmock
def setup_mocks(mod_attr_val_list):
# use setattr instead of flexmock because flexmocks are torndown
# after every test in stopTest whereas these mocks are needed across
# all tests in class
orig_mod_attr_val_list = []
for mod, attr, val in mod_attr_val_list:
orig_mod_attr_val_list.append(
(mod, attr, getattr(mod, attr)))
setattr(mod, attr, val)
return orig_mod_attr_val_list
#end setup_mocks
def teardown_mocks(mod_attr_val_list):
for mod, attr, val in mod_attr_val_list:
setattr(mod, attr, val)
# end teardown_mocks
@contextlib.contextmanager
def patch(target_obj, target_method_name, patched):
orig_method = getattr(target_obj, target_method_name)
def patched_wrapper(*args, **kwargs):
return patched(orig_method, *args, **kwargs)
setattr(target_obj, target_method_name, patched_wrapper)
try:
yield
finally:
setattr(target_obj, target_method_name, orig_method)
#end patch
@contextlib.contextmanager
def patch_imports(imports):
# save original, patch and restore
orig_modules = {}
mocked_modules = []
try:
for import_str, fake in imports:
cur_module = None
for mod_part in import_str.split('.'):
if not cur_module:
cur_module = mod_part
else:
cur_module += "." + mod_part
if cur_module in sys.modules:
orig_modules[cur_module] = sys.modules[cur_module]
else:
mocked_modules.append(cur_module)
sys.modules[cur_module] = fake
yield
finally:
for mod_name, mod in orig_modules.items():
sys.modules[mod_name] = mod
for mod_name in mocked_modules:
del sys.modules[mod_name]
#end patch_import
cov_handle = None
class TestCase(testtools.TestCase, fixtures.TestWithFixtures):
_HTTP_HEADERS = {
'Content-type': 'application/json; charset="UTF-8"',
}
_config_knobs = [
('DEFAULTS', '', ''),
]
mocks = [
(novaclient.client, 'Client', FakeNovaClient.initialize),
(pycassa.system_manager.Connection, '__init__',stub),
(pycassa.system_manager.SystemManager, '__new__',FakeSystemManager),
(pycassa.ConnectionPool, '__new__',FakeConnectionPool),
(pycassa.ColumnFamily, '__new__',FakeCF),
(pycassa.util, 'convert_uuid_to_time',Fake_uuid_to_time),
(kazoo.client.KazooClient, '__new__',FakeKazooClient),
(kazoo.recipe.counter.Counter, '__init__',fake_zk_counter_init),
(kazoo.recipe.counter.Counter, '_change',fake_zk_counter_change),
(kazoo.recipe.counter.Counter, 'value',fake_zk_counter_value),
(kazoo.recipe.counter.Counter, '_ensure_node',
fake_zk_counter_ensure_node),
(kazoo.handlers.gevent.SequentialGeventHandler, '__init__',stub),
(kombu.Connection, '__new__',FakeKombu.Connection),
(kombu.Exchange, '__new__',FakeKombu.Exchange),
(kombu.Queue, '__new__',FakeKombu.Queue),
(kombu.Consumer, '__new__',FakeKombu.Consumer),
(kombu.Producer, '__new__',FakeKombu.Producer),
(VncApiConfigLog, '__new__',FakeApiConfigLog),
]
def __init__(self, *args, **kwargs):
self._logger = logging.getLogger(__name__)
self._assert_till_max_tries = 600
super(TestCase, self).__init__(*args, **kwargs)
self.addOnException(self._add_detailed_traceback)
def _add_detailed_traceback(self, exc_info):
vnc_cgitb.enable(format='text')
from cStringIO import StringIO
tmp_file = StringIO()
cgitb_hook(format="text", file=tmp_file, info=exc_info)
tb_str = tmp_file.getvalue()
tmp_file.close()
self.addDetail('detailed-traceback', content.text_content(tb_str))
def _add_detail(self, detail_str):
frame = inspect.stack()[1]
self.addDetail('%s:%s ' %(frame[1],frame[2]), content.text_content(detail_str))
def _add_request_detail(self, op, url, headers=None, query_params=None,
body=None):
request_str = ' URL: ' + pformat(url) + \
' OPER: ' + pformat(op) + \
' Headers: ' + pformat(headers) + \
' Query Params: ' + pformat(query_params) + \
' Body: ' + pformat(body)
self._add_detail('Requesting: ' + request_str)
def _http_get(self, uri, query_params=None):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('GET', url, headers=self._HTTP_HEADERS,
query_params=query_params)
response = self._api_server_session.get(url, headers=self._HTTP_HEADERS,
params=query_params)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_get
def _http_post(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('POST', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.post(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_post
def _http_delete(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('DELETE', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.delete(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_delete
def _http_put(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('PUT', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.put(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_put
def _create_test_objects(self, count=1, proj_obj=None):
ret_objs = []
for i in range(count):
obj_name = self.id() + '-vn-' + str(i)
obj = VirtualNetwork(obj_name, parent_obj=proj_obj)
self._add_detail('creating-object ' + obj_name)
self._vnc_lib.virtual_network_create(obj)
ret_objs.append(obj)
return ret_objs
def _create_test_object(self):
return self._create_test_objects()[0]
def _delete_test_object(self, obj):
self._vnc_lib.virtual_network_delete(id=obj.uuid)
def get_cf(self, keyspace_name, cf_name):
ks_name = '%s_%s' %(self._cluster_id, keyspace_name)
return CassandraCFs.get_cf(ks_name, cf_name)
# end get_cf
def vnc_db_has_ident(self, obj=None, id=None, type_fq_name=None):
if obj:
_type = obj.get_type()
_fq_name = obj.get_fq_name()
if id:
_type = self._vnc_lib.id_to_fq_name_type(id)
_fq_name = self._vnc_lib.id_to_fq_name(id)
if type_fq_name:
_type = type_fq_name[0]
_fq_name = type_fq_name[1]
try:
vnc_obj = self._vnc_lib._object_read(_type, _fq_name)
except NoIdError:
return None
return vnc_obj
def vnc_db_ident_has_prop(self, obj, prop_name, prop_value):
vnc_obj = self.vnc_db_has_ident(obj=obj)
if vnc_obj is None:
return False
return getattr(vnc_obj, prop_name) == prop_value
def vnc_db_ident_has_ref(self, obj, ref_name, ref_fq_name):
vnc_obj = self.vnc_db_has_ident(obj=obj)
if vnc_obj is None:
return False
refs = getattr(vnc_obj, ref_name, [])
for ref in refs:
if ref['to'] == ref_fq_name:
return True
return False
def vnc_db_doesnt_have_ident(self, obj=None, id=None, type_fq_name=None):
return not self.vnc_db_has_ident(obj=obj, id=id,
type_fq_name=type_fq_name)
def vnc_db_ident_doesnt_have_ref(self, obj, ref_name, ref_fq_name=None):
return not self.vnc_db_ident_has_ref(obj, ref_name, ref_fq_name)
def assertTill(self, expr_or_cb, *cb_args, **cb_kwargs):
tries = 0
while True:
if callable(expr_or_cb):
ret = expr_or_cb(*cb_args, **cb_kwargs)
else:
ret = eval(expr_or_cb)
if ret:
break
tries = tries + 1
if tries >= self._assert_till_max_tries:
raise Exception('Max retries')
self._logger.warn('Retrying at ' + str(inspect.stack()[1]))
gevent.sleep(0.1)
@classmethod
def setUpClass(cls, extra_mocks=None, extra_config_knobs=None,
db='cassandra'):
super(TestCase, cls).setUpClass()
global cov_handle
if not cov_handle:
cov_handle = coverage.coverage(source=['./'], omit=['.venv/*'])
#cov_handle.start()
cfgm_common.zkclient.LOG_DIR = './'
gevent.pywsgi.WSGIServer.handler_class = FakeWSGIHandler
cls.orig_mocked_values = setup_mocks(cls.mocks + (extra_mocks or []))
cls._cluster_id = cls.__name__
cls._server_info = create_api_server_instance(
cls._cluster_id, cls._config_knobs + (extra_config_knobs or []),
db=db)
try:
cls._api_server_ip = cls._server_info['ip']
cls._api_server_port = cls._server_info['service_port']
cls._api_admin_port = cls._server_info['admin_port']
cls._api_svr_greenlet = cls._server_info['greenlet']
cls._api_svr_app = cls._server_info['app']
cls._vnc_lib = cls._server_info['api_conn']
cls._api_server_session = cls._server_info['api_session']
cls._api_server = cls._server_info['api_server']
except Exception as e:
cls.tearDownClass()
raise
# end setUpClass
@classmethod
def tearDownClass(cls):
destroy_api_server_instance(cls._server_info)
teardown_mocks(cls.orig_mocked_values)
# end tearDownClass
def setUp(self, extra_mocks=None, extra_config_knobs=None):
self._logger.info("Running %s" %(self.id()))
super(TestCase, self).setUp()
# end setUp
def tearDown(self):
self._logger.info("Finished %s" %(self.id()))
self.wait_till_api_server_idle()
super(TestCase, self).tearDown()
# end tearDown
def wait_till_api_server_idle(self):
# wait for in-flight messages to be processed
if hasattr(self._api_server._db_conn, '_msgbus'):
while self._api_server._db_conn._msgbus.num_pending_messages() > 0:
gevent.sleep(0.001)
vhost_url = self._api_server._db_conn._msgbus._urls
while not FakeKombu.is_empty(vhost_url, 'vnc_config'):
gevent.sleep(0.001)
# wait_till_api_server_idle
def create_virtual_network(self, vn_name, vn_subnet='10.0.0.0/24'):
vn_obj = VirtualNetwork(name=vn_name)
ipam_fq_name = [
'default-domain', 'default-project', 'default-network-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
subnets = [vn_subnet] if isinstance(vn_subnet, basestring) else vn_subnet
subnet_infos = []
for subnet in subnets:
cidr = IPNetwork(subnet)
subnet_infos.append(
IpamSubnetType(
subnet=SubnetType(
str(cidr.network),
int(cidr.prefixlen),
),
default_gateway=str(IPAddress(cidr.last - 1)),
subnet_uuid=str(uuid.uuid4()),
)
)
subnet_data = VnSubnetsType(subnet_infos)
vn_obj.add_network_ipam(ipam_obj, subnet_data)
self._vnc_lib.virtual_network_create(vn_obj)
vn_obj.clear_pending_updates()
return vn_obj
# end create_virtual_network
def _create_service(self, vn_list, si_name, auto_policy,
create_right_port=True, **kwargs):
sa_set = None
if kwargs.get('service_virtualization_type') == 'physical-device':
pr = PhysicalRouter(si_name)
self._vnc_lib.physical_router_create(pr)
sa_set = ServiceApplianceSet('sa_set-'+si_name)
self._vnc_lib.service_appliance_set_create(sa_set)
sa = ServiceAppliance('sa-'+si_name, parent_obj=sa_set)
for if_type, _ in vn_list:
attr = ServiceApplianceInterfaceType(interface_type=if_type)
pi = PhysicalInterface('pi-'+si_name+if_type, parent_obj=pr)
self._vnc_lib.physical_interface_create(pi)
sa.add_physical_interface(pi, attr)
self._vnc_lib.service_appliance_create(sa)
sti = [ServiceTemplateInterfaceType(k) for k, _ in vn_list]
st_prop = ServiceTemplateType(
flavor='medium',
image_name='junk',
ordered_interfaces=True,
interface_type=sti, **kwargs)
service_template = ServiceTemplate(
name=si_name + 'template',
service_template_properties=st_prop)
if sa_set:
service_template.add_service_appliance_set(sa_set)
self._vnc_lib.service_template_create(service_template)
scale_out = ServiceScaleOutType()
if kwargs.get('service_mode') in ['in-network', 'in-network-nat']:
if_list = [ServiceInstanceInterfaceType(virtual_network=vn)
for _, vn in vn_list]
si_props = ServiceInstanceType(auto_policy=auto_policy,
interface_list=if_list,
scale_out=scale_out)
else:
if_list = [ServiceInstanceInterfaceType(),
ServiceInstanceInterfaceType()]
si_props = ServiceInstanceType(interface_list=if_list,
scale_out=scale_out)
service_instance = ServiceInstance(
name=si_name, service_instance_properties=si_props)
service_instance.add_service_template(service_template)
self._vnc_lib.service_instance_create(service_instance)
if kwargs.get('version') == 2:
proj = Project()
pt = PortTuple('pt-'+si_name, parent_obj=service_instance)
self._vnc_lib.port_tuple_create(pt)
for if_type, vn_name in vn_list:
if if_type == 'right' and not create_right_port:
continue
port = VirtualMachineInterface(si_name+if_type, parent_obj=proj)
vmi_props = VirtualMachineInterfacePropertiesType(
service_interface_type=if_type)
vn_obj = self._vnc_lib.virtual_network_read(fq_name_str=vn_name)
port.set_virtual_machine_interface_properties(vmi_props)
port.add_virtual_network(vn_obj)
port.add_port_tuple(pt)
self._vnc_lib.virtual_machine_interface_create(port)
            # Give the API a chance to create the iip for the vmi of the pt
            # before the schema transformer allocates an iip address to the
            # service chain
gevent.sleep(0.1)
return service_instance.get_fq_name_str()
def create_network_policy(self, vn1, vn2, service_list=None, mirror_service=None,
auto_policy=False, create_right_port = True, **kwargs):
vn1_name = vn1 if isinstance(vn1, basestring) else vn1.get_fq_name_str()
vn2_name = vn2 if isinstance(vn2, basestring) else vn2.get_fq_name_str()
addr1 = AddressType(virtual_network=vn1_name, subnet=kwargs.get('subnet_1'))
addr2 = AddressType(virtual_network=vn2_name, subnet=kwargs.get('subnet_2'))
port = PortType(-1, 0)
service_name_list = []
si_list = service_list or []
if service_list:
for service in si_list:
service_name_list.append(self._create_service(
[('left', vn1_name), ('right', vn2_name)], service,
auto_policy, create_right_port, **kwargs))
if mirror_service:
mirror_si = self._create_service(
[('left', vn1_name), ('right', vn2_name)], mirror_service, False,
service_mode='transparent', service_type='analyzer')
action_list = ActionListType()
if mirror_service:
mirror = MirrorActionType(analyzer_name=mirror_si)
action_list.mirror_to=mirror
if service_name_list:
action_list.apply_service=service_name_list
else:
action_list.simple_action='pass'
prule = PolicyRuleType(direction="<>", protocol="any",
src_addresses=[addr1], dst_addresses=[addr2],
src_ports=[port], dst_ports=[port],
action_list=action_list)
pentry = PolicyEntriesType([prule])
np = NetworkPolicy(str(uuid.uuid4()), network_policy_entries=pentry)
if auto_policy:
return np
self._vnc_lib.network_policy_create(np)
return np
# end create_network_policy
def create_logical_router(self, name, nb_of_attached_networks=1, **kwargs):
lr = LogicalRouter(name, **kwargs)
vns = []
vmis = []
iips = []
for idx in range(nb_of_attached_networks):
# Virtual Network
vn = self.create_virtual_network('%s-network%d' % (name, idx),
'10.%d.0.0/24' % idx)
vns.append(vn)
# Virtual Machine Interface
vmi_name = '%s-network%d-vmi' % (name, idx)
vmi = VirtualMachineInterface(
vmi_name, parent_type='project',
fq_name=['default-domain', 'default-project', vmi_name])
vmi.set_virtual_machine_interface_device_owner(
'network:router_interface')
vmi.add_virtual_network(vn)
self._vnc_lib.virtual_machine_interface_create(vmi)
lr.add_virtual_machine_interface(vmi)
vmis.append(vmi)
# Instance IP
gw_ip = vn.get_network_ipam_refs()[0]['attr'].ipam_subnets[0].\
default_gateway
subnet_uuid = vn.get_network_ipam_refs()[0]['attr'].\
ipam_subnets[0].subnet_uuid
iip = InstanceIp(name='%s-network%d-iip' % (name, idx))
iip.set_subnet_uuid(subnet_uuid)
iip.set_virtual_machine_interface(vmi)
iip.set_virtual_network(vn)
iip.set_instance_ip_family('v4')
iip.set_instance_ip_address(gw_ip)
self._vnc_lib.instance_ip_create(iip)
iips.append(iip)
self._vnc_lib.logical_router_create(lr)
return lr, vns, vmis, iips
def _security_group_rule_build(self, rule_info, sg_fq_name_str):
protocol = rule_info['protocol']
port_min = rule_info['port_min'] or 0
port_max = rule_info['port_max'] or 65535
direction = rule_info['direction'] or 'ingress'
ip_prefix = rule_info['ip_prefix']
ether_type = rule_info['ether_type']
if ip_prefix:
cidr = ip_prefix.split('/')
pfx = cidr[0]
pfx_len = int(cidr[1])
endpt = [AddressType(subnet=SubnetType(pfx, pfx_len))]
else:
endpt = [AddressType(security_group=sg_fq_name_str)]
local = None
remote = None
if direction == 'ingress':
dir = '>'
local = endpt
remote = [AddressType(security_group='local')]
else:
dir = '>'
remote = endpt
local = [AddressType(security_group='local')]
if not protocol:
protocol = 'any'
if protocol.isdigit():
protocol = int(protocol)
if protocol < 0 or protocol > 255:
raise Exception('SecurityGroupRuleInvalidProtocol-%s' % protocol)
else:
if protocol not in ['any', 'tcp', 'udp', 'icmp', 'icmp6']:
raise Exception('SecurityGroupRuleInvalidProtocol-%s' % protocol)
if not ip_prefix and not sg_fq_name_str:
if not ether_type:
ether_type = 'IPv4'
sgr_uuid = str(uuid.uuid4())
rule = PolicyRuleType(rule_uuid=sgr_uuid, direction=dir,
protocol=protocol,
src_addresses=local,
src_ports=[PortType(0, 65535)],
dst_addresses=remote,
dst_ports=[PortType(port_min, port_max)],
ethertype=ether_type)
return rule
#end _security_group_rule_build
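    # Hypothetical example input (not from the original tests): a rule_info dict
    # describing ingress TCP on port 80 from 10.0.0.0/24 over IPv4.
    #
    #     rule_info = {'protocol': 'tcp', 'port_min': 80, 'port_max': 80,
    #                  'direction': 'ingress', 'ip_prefix': '10.0.0.0/24',
    #                  'ether_type': 'IPv4'}
    #     rule = self._security_group_rule_build(rule_info, sg_fq_name_str)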
def _security_group_rule_append(self, sg_obj, sg_rule):
rules = sg_obj.get_security_group_entries()
if rules is None:
rules = PolicyEntriesType([sg_rule])
else:
for sgr in rules.get_policy_rule() or []:
sgr_copy = copy.copy(sgr)
sgr_copy.rule_uuid = sg_rule.rule_uuid
if sg_rule == sgr_copy:
raise Exception('SecurityGroupRuleExists %s' % sgr.rule_uuid)
rules.add_policy_rule(sg_rule)
sg_obj.set_security_group_entries(rules)
#end _security_group_rule_append
def _security_group_rule_remove(self, sg_obj, sg_rule):
rules = sg_obj.get_security_group_entries()
if rules is None:
            raise Exception('SecurityGroupRuleNotExists %s' % sg_rule.rule_uuid)
else:
for sgr in rules.get_policy_rule() or []:
if sgr.rule_uuid == sg_rule.rule_uuid:
rules.delete_policy_rule(sgr)
sg_obj.set_security_group_entries(rules)
return
raise Exception('SecurityGroupRuleNotExists %s' % sg_rule.rule_uuid)
    #end _security_group_rule_remove
# end TestCase
class ErrorInterceptingLogger(sandesh_logger.SandeshLogger):
_exceptions = []
_other_errors = []
@classmethod
def register_exception(cls, msg, *args, **kwargs):
if 'traceback' in msg.lower():
cls._exceptions.append((msg, args, kwargs))
return True
return False
@classmethod
def register_error(cls, msg, *args, **kwargs):
if not cls.register_exception(msg, *args, **kwargs):
cls._other_errors.append((msg, args, kwargs))
@classmethod
def get_exceptions(cls):
return list(cls._exceptions)
@classmethod
def get_other_errors(cls):
return list(cls._other_errors)
@classmethod
def reset(cls):
cls._exceptions, cls._other_errors = [], []
@classmethod
def get_qualified_name(cls):
return '{module_name}.{class_name}'.format(
module_name=cls.__module__, class_name=cls.__name__)
class LoggerWrapper(object):
def __init__(self, logger):
self._logger = logger
def __getattr__(self, item):
return getattr(self._logger, item)
def error(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.error(msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.critical(msg, *args, **kwargs)
def exception(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.exception(msg, *args, **kwargs)
def log(self, lvl, msg, *args, **kwargs):
ErrorInterceptingLogger.register_exception(
msg, *args, **kwargs)
return self._logger.log(lvl, msg, *args, **kwargs)
def __init__(self, *args, **kwargs):
super(ErrorInterceptingLogger, self).__init__(*args, **kwargs)
self._logger = ErrorInterceptingLogger.LoggerWrapper(
self._logger)
|
apache-2.0
| -2,208,394,981,384,609,800 | 37.410698 | 121 | 0.594897 | false | 3.600187 | true | false | false |
pytorch/vision
|
references/detection/utils.py
|
1
|
8671
|
from collections import defaultdict, deque
import datetime
import errno
import os
import time
import torch
import torch.distributed as dist
class SmoothedValue(object):
"""Track a series of values and provide access to smoothed values over a
window or the global series average.
"""
def __init__(self, window_size=20, fmt=None):
if fmt is None:
fmt = "{median:.4f} ({global_avg:.4f})"
self.deque = deque(maxlen=window_size)
self.total = 0.0
self.count = 0
self.fmt = fmt
def update(self, value, n=1):
self.deque.append(value)
self.count += n
self.total += value * n
def synchronize_between_processes(self):
"""
Warning: does not synchronize the deque!
"""
if not is_dist_avail_and_initialized():
return
t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda')
dist.barrier()
dist.all_reduce(t)
t = t.tolist()
self.count = int(t[0])
self.total = t[1]
@property
def median(self):
d = torch.tensor(list(self.deque))
return d.median().item()
@property
def avg(self):
d = torch.tensor(list(self.deque), dtype=torch.float32)
return d.mean().item()
@property
def global_avg(self):
return self.total / self.count
@property
def max(self):
return max(self.deque)
@property
def value(self):
return self.deque[-1]
def __str__(self):
return self.fmt.format(
median=self.median,
avg=self.avg,
global_avg=self.global_avg,
max=self.max,
value=self.value)
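# Hypothetical usage (not in the original file; `loss` is a made-up tensor name):
# track a running loss and print both the windowed median and the global average.
#
#     loss_meter = SmoothedValue(window_size=20, fmt="{median:.4f} ({global_avg:.4f})")
#     loss_meter.update(loss.item())
#     print(str(loss_meter))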
def all_gather(data):
"""
Run all_gather on arbitrary picklable data (not necessarily tensors)
Args:
data: any picklable object
Returns:
list[data]: list of data gathered from each rank
"""
world_size = get_world_size()
if world_size == 1:
return [data]
data_list = [None] * world_size
dist.all_gather_object(data_list, data)
return data_list
def reduce_dict(input_dict, average=True):
"""
Args:
input_dict (dict): all the values will be reduced
average (bool): whether to do average or sum
Reduce the values in the dictionary from all processes so that all processes
have the averaged results. Returns a dict with the same fields as
input_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return input_dict
with torch.no_grad():
names = []
values = []
# sort the keys so that they are consistent across processes
for k in sorted(input_dict.keys()):
names.append(k)
values.append(input_dict[k])
values = torch.stack(values, dim=0)
dist.all_reduce(values)
if average:
values /= world_size
reduced_dict = {k: v for k, v in zip(names, values)}
return reduced_dict
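# Hypothetical example (not from the original file): average a tensor-valued,
# per-rank loss dict so every process logs identical numbers. The loss names are
# made up for illustration.
#
#     loss_dict = {'loss_classifier': loss_cls, 'loss_box_reg': loss_box}
#     loss_dict_reduced = reduce_dict(loss_dict)  # values averaged over all ranks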
class MetricLogger(object):
def __init__(self, delimiter="\t"):
self.meters = defaultdict(SmoothedValue)
self.delimiter = delimiter
def update(self, **kwargs):
for k, v in kwargs.items():
if isinstance(v, torch.Tensor):
v = v.item()
assert isinstance(v, (float, int))
self.meters[k].update(v)
def __getattr__(self, attr):
if attr in self.meters:
return self.meters[attr]
if attr in self.__dict__:
return self.__dict__[attr]
raise AttributeError("'{}' object has no attribute '{}'".format(
type(self).__name__, attr))
def __str__(self):
loss_str = []
for name, meter in self.meters.items():
loss_str.append(
"{}: {}".format(name, str(meter))
)
return self.delimiter.join(loss_str)
def synchronize_between_processes(self):
for meter in self.meters.values():
meter.synchronize_between_processes()
def add_meter(self, name, meter):
self.meters[name] = meter
def log_every(self, iterable, print_freq, header=None):
i = 0
if not header:
header = ''
start_time = time.time()
end = time.time()
iter_time = SmoothedValue(fmt='{avg:.4f}')
data_time = SmoothedValue(fmt='{avg:.4f}')
space_fmt = ':' + str(len(str(len(iterable)))) + 'd'
if torch.cuda.is_available():
log_msg = self.delimiter.join([
header,
'[{0' + space_fmt + '}/{1}]',
'eta: {eta}',
'{meters}',
'time: {time}',
'data: {data}',
'max mem: {memory:.0f}'
])
else:
log_msg = self.delimiter.join([
header,
'[{0' + space_fmt + '}/{1}]',
'eta: {eta}',
'{meters}',
'time: {time}',
'data: {data}'
])
MB = 1024.0 * 1024.0
for obj in iterable:
data_time.update(time.time() - end)
yield obj
iter_time.update(time.time() - end)
if i % print_freq == 0 or i == len(iterable) - 1:
eta_seconds = iter_time.global_avg * (len(iterable) - i)
eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
if torch.cuda.is_available():
print(log_msg.format(
i, len(iterable), eta=eta_string,
meters=str(self),
time=str(iter_time), data=str(data_time),
memory=torch.cuda.max_memory_allocated() / MB))
else:
print(log_msg.format(
i, len(iterable), eta=eta_string,
meters=str(self),
time=str(iter_time), data=str(data_time)))
i += 1
end = time.time()
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print('{} Total time: {} ({:.4f} s / it)'.format(
header, total_time_str, total_time / len(iterable)))
def collate_fn(batch):
return tuple(zip(*batch))
def warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor):
def f(x):
if x >= warmup_iters:
return 1
alpha = float(x) / warmup_iters
return warmup_factor * (1 - alpha) + alpha
return torch.optim.lr_scheduler.LambdaLR(optimizer, f)
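# Minimal usage sketch (an assumption, not part of the original file): ramp the
# learning rate from warmup_factor * base_lr up to base_lr over the first
# warmup_iters optimizer steps, stepping the scheduler once per batch.
#
#     lr_scheduler = warmup_lr_scheduler(optimizer, warmup_iters=1000,
#                                        warmup_factor=1.0 / 1000)
#     for images, targets in data_loader:
#         ...  # forward/backward/optimizer.step()
#         lr_scheduler.step()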
def mkdir(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def setup_for_distributed(is_master):
"""
    This function disables printing when not in the master process
"""
import builtins as __builtin__
builtin_print = __builtin__.print
def print(*args, **kwargs):
force = kwargs.pop('force', False)
if is_master or force:
builtin_print(*args, **kwargs)
__builtin__.print = print
def is_dist_avail_and_initialized():
if not dist.is_available():
return False
if not dist.is_initialized():
return False
return True
def get_world_size():
if not is_dist_avail_and_initialized():
return 1
return dist.get_world_size()
def get_rank():
if not is_dist_avail_and_initialized():
return 0
return dist.get_rank()
def is_main_process():
return get_rank() == 0
def save_on_master(*args, **kwargs):
if is_main_process():
torch.save(*args, **kwargs)
def init_distributed_mode(args):
if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ:
args.rank = int(os.environ["RANK"])
args.world_size = int(os.environ['WORLD_SIZE'])
args.gpu = int(os.environ['LOCAL_RANK'])
elif 'SLURM_PROCID' in os.environ:
args.rank = int(os.environ['SLURM_PROCID'])
args.gpu = args.rank % torch.cuda.device_count()
else:
print('Not using distributed mode')
args.distributed = False
return
args.distributed = True
torch.cuda.set_device(args.gpu)
args.dist_backend = 'nccl'
print('| distributed init (rank {}): {}'.format(
args.rank, args.dist_url), flush=True)
torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
world_size=args.world_size, rank=args.rank)
torch.distributed.barrier()
setup_for_distributed(args.rank == 0)
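# Illustrative note (assumption, not part of the original file): this relies on the
# launcher exporting RANK, WORLD_SIZE and LOCAL_RANK (as torchrun does), or
# SLURM_PROCID under SLURM, and on the calling script supplying args.dist_url;
# otherwise distributed mode is disabled. A made-up launch command for illustration:
#
#     torchrun --nproc_per_node=8 train.py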
|
bsd-3-clause
| -5,394,417,501,050,074,000 | 28.39322 | 94 | 0.548149 | false | 3.846939 | false | false | false |
LowieHuyghe/edmunds
|
tests/log/drivers/testsyslog.py
|
1
|
2970
|
from tests.testcase import TestCase
from tests.foundation.syslogserver import SysLogServer
class TestSysLog(TestCase):
"""
Test the SysLog
"""
def set_up(self):
"""
Set up the test case
"""
super(TestSysLog, self).set_up()
self._server = SysLogServer()
self._server.start()
def tear_down(self):
"""
Tear down the test case
"""
super(TestSysLog, self).tear_down()
self._server.stop()
def test_sys_log(self):
"""
Test the sys log
"""
info_string = 'info_%s' % self.rand_str(20)
warning_string = 'warning_%s' % self.rand_str(20)
error_string = 'error_%s' % self.rand_str(20)
# Write config
self.write_config([
"from edmunds.log.drivers.syslog import SysLog \n",
"from logging.handlers import SysLogHandler \n",
"from logging import WARNING \n",
"APP = { \n",
" 'debug': False, \n",
" 'log': { \n",
" 'enabled': True, \n",
" 'instances': [ \n",
" { \n",
" 'name': 'syslog',\n",
" 'driver': SysLog,\n",
" 'level': WARNING,\n",
" 'address': ('%s', %i),\n" % (self._server.host, self._server.port),
" 'facility': SysLogHandler.LOG_USER,\n",
" 'socktype': None,\n",
" 'format': '%(message)s',\n",
" }, \n",
" { \n",
" 'name': 'syslog2',\n",
" 'driver': SysLog,\n",
" 'level': WARNING,\n",
" 'address': ('%s', %i),\n" % (self._server.host, self._server.port),
" }, \n",
" ], \n",
" }, \n",
"} \n",
])
# Create app
app = self.create_application()
# Add route
rule = '/' + self.rand_str(20)
@app.route(rule)
def handle_route():
app.logger.info(info_string)
app.logger.warning(warning_string)
app.logger.error(error_string)
return ''
with app.test_client() as c:
# Check syslog
self.assert_not_in(info_string, '\n'.join(self._server.get_data()))
self.assert_not_in(warning_string, '\n'.join(self._server.get_data()))
self.assert_not_in(error_string, '\n'.join(self._server.get_data()))
# Call route
c.get(rule)
# Check syslog
self.assert_not_in(info_string, '\n'.join(self._server.get_data()))
self.assert_in(warning_string, '\n'.join(self._server.get_data()))
self.assert_in(error_string, '\n'.join(self._server.get_data()))
|
apache-2.0
| 7,226,208,718,824,647,000 | 30.595745 | 95 | 0.437037 | false | 3.817481 | true | false | false |
yhteentoimivuuspalvelut/ckanext-ytp-request
|
ckanext/ytp/request/command.py
|
1
|
1325
|
import logging
from ckan.lib.cli import CkanCommand
class InitDBCommand(CkanCommand):
"""
Initialises the database with the required tables
Connects to the CKAN database and creates the member request tables
Usage:
paster initdb
            - Creates the member request database tables
"""
summary = __doc__.split('\n')[0]
usage = __doc__
max_args = 0
min_args = 0
def __init__(self, name):
super(InitDBCommand, self).__init__(name)
def command(self):
"""
Parse command line arguments and call appropriate method.
"""
# if not self.args or self.args[0] in ['--help', '-h', 'help']:
# print self.usage
# sys.exit(1)
# cmd = self.args[0]
self._load_config()
# Initialise logger after the config is loaded, so it is not disabled.
self.log = logging.getLogger(__name__)
# if cmd == 'initdb':
import ckan.model as model
model.Session.remove()
model.Session.configure(bind=model.meta.engine)
import ckanext.ytp.request.model as rmodel
self.log.info("Initializing tables")
rmodel.init_tables()
self.log.info("DB tables are setup")
# else:
# self.log.error('Command %s not recognized' % (cmd,))
|
agpl-3.0
| -445,495,278,592,315,600 | 26.604167 | 78 | 0.587925 | false | 3.943452 | false | false | false |
dfm/emcee3
|
emcee3/tests/unit/test_state.py
|
1
|
1654
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import numpy as np
from ...state import State
__all__ = ["test_dtype", "test_serialization", "test_repr"]
def test_dtype(seed=1234):
np.random.seed(seed)
dtype = [
("coords", np.float64, (4, )),
("log_prior", np.float64),
("log_likelihood", np.float64),
("accepted", bool)
]
coords = np.random.randn(4)
state = State(coords)
assert state.dtype == np.dtype(dtype)
state = State(coords, face=10.0, blah=6, _hidden=None)
dtype += [
("blah", int),
("face", float),
]
assert state.dtype == np.dtype(dtype)
state = State(coords, face=10.0, blah=6, _hidden=None,
matrix=np.ones((3, 1)))
dtype += [
("matrix", float, (3, 1)),
]
assert state.dtype == np.dtype(dtype)
state = State(coords, face=10.0, blah=6, _hidden=None,
matrix=np.ones((3, 1)), vector=np.zeros(3))
dtype += [
("vector", float, (3,)),
]
assert state.dtype == np.dtype(dtype)
def test_serialization(seed=1234):
np.random.seed(seed)
coords = np.random.randn(4)
state = State(coords, 0.0, -1.5, True, face="blah")
array = state.to_array()
assert np.allclose(array["coords"], coords)
new_state = State.from_array(array)
assert state == new_state
def test_repr():
coords = np.zeros(1)
lnp = 0.0
lnl = -1.5
state = State(coords, lnp, lnl, True)
assert (
repr(state) ==
"State(array({0}), log_prior={1}, log_likelihood={2}, accepted=True)"
.format(coords, lnp, lnl)
)
|
mit
| 8,939,081,856,143,124,000 | 23.686567 | 77 | 0.555623 | false | 3.174664 | true | false | false |
allenai/document-qa
|
docqa/nn/span_prediction_ops.py
|
1
|
4078
|
import tensorflow as tf
import numpy as np
"""
Some utility functions for dealing with span prediction in tensorflow
"""
def best_span_from_bounds(start_logits, end_logits, bound=None):
"""
Brute force approach to finding the best span from start/end logits in tensorflow, still usually
    faster than the Python dynamic-programming version
"""
b = tf.shape(start_logits)[0]
    # Using `top_k` to get the index and value at once is faster
    # than using argmax and then gather to get the value
top_k = tf.nn.top_k(start_logits + end_logits, k=1)
values, indices = [tf.squeeze(x, axis=[1]) for x in top_k]
# Convert to (start_position, length) format
indices = tf.stack([indices, tf.fill((b,), 0)], axis=1)
# TODO Might be better to build the batch x n_word x n_word
# matrix and use tf.matrix_band to zero out the unwanted ones...
if bound is None:
n_lengths = tf.shape(start_logits)[1]
else:
# take the min in case the bound > the context
n_lengths = tf.minimum(bound, tf.shape(start_logits)[1])
def compute(i, values, indices):
top_k = tf.nn.top_k(start_logits[:, :-i] + end_logits[:, i:])
b_values, b_indices = [tf.squeeze(x, axis=[1]) for x in top_k]
b_indices = tf.stack([b_indices, tf.fill((b, ), i)], axis=1)
indices = tf.where(b_values > values, b_indices, indices)
values = tf.maximum(values, b_values)
return i+1, values, indices
_, values, indices = tf.while_loop(
lambda ix, values, indices: ix < n_lengths,
compute,
[1, values, indices],
back_prop=False)
spans = tf.stack([indices[:, 0], indices[:, 0] + indices[:, 1]], axis=1)
return spans, values
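# Illustrative note (an assumption added for clarity): on iteration i,
# start_logits[:, :-i] + end_logits[:, i:] scores every span whose end index equals
# start + i, so the while-loop sweeps one span length per iteration (up to `bound`)
# while keeping a running argmax over all lengths seen so far.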
def packed_span_f1_mask(spans, l, bound):
starts = []
ends = []
for i in range(bound):
s = tf.range(0, l - i, dtype=tf.int32)
starts.append(s)
ends.append(s + i)
starts = tf.concat(starts, axis=0)
ends = tf.concat(ends, axis=0)
starts = tf.tile(tf.expand_dims(starts, 0), [tf.shape(spans)[0], 1])
ends = tf.tile(tf.expand_dims(ends, 0), [tf.shape(spans)[0], 1])
pred_len = tf.cast(ends - starts + 1, tf.float32)
span_start = tf.maximum(starts, spans[:, 0:1])
span_stop = tf.minimum(ends, spans[:, 1:2])
overlap_len = tf.cast(span_stop - span_start + 1, tf.float32)
true_len = tf.cast(spans[:, 1:2] - spans[:, 0:1] + 1, tf.float32)
p = overlap_len / pred_len
r = overlap_len / true_len
return tf.where(overlap_len > 0, 2 * p * r / (p + r), tf.zeros(tf.shape(starts)))
def to_packed_coordinates(spans, l, bound=None):
""" Converts the spans to vector of packed coordiantes, in the packed format
spans are indexed first by length, then by start position. If bound is given
spans are truncated to be of `bound` length """
lens = spans[:, 1] - spans[:, 0]
if bound is not None:
lens = tf.minimum(lens, bound-1)
return spans[:, 0] + l * lens - lens * (lens - 1) // 2
def to_packed_coordinates_np(spans, l, bound=None):
""" Converts the spans to vector of packed coordiantes, in the packed format
spans are indexed first by length, then by start position in a flattened array.
If bound is given spans are truncated to be of `bound` length """
lens = spans[:, 1] - spans[:, 0]
if bound is not None:
lens = np.minimum(lens, bound-1)
return spans[:, 0] + l * lens - lens * (lens - 1) // 2
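# Worked example (an assumption, for illustration only): with l = 5 tokens and
# bound = 3, the packed layout stores the 5 length-0 spans first, then the 4
# length-1 spans, then the 3 length-2 spans. The span (start=2, end=3) has
# length 1, so its packed index is 2 + 5*1 - 1*0//2 = 7.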
def to_unpacked_coordinates(ix, l, bound):
ix = tf.cast(ix, tf.int32)
# You can actually compute the lens in closed form:
# lens = tf.floor(0.5 * (-tf.sqrt(4 * tf.square(l) + 4 * l - 8 * ix + 1) + 2 * l + 1))
# but it is very ugly and rounding errors could cause problems, so this approach seems safer
lens = []
for i in range(bound):
lens.append(tf.fill((l - i,), i))
lens = tf.concat(lens, axis=0)
lens = tf.gather(lens, ix)
answer_start = ix - l * lens + lens * (lens - 1) // 2
return tf.stack([answer_start, answer_start+lens], axis=1)
|
apache-2.0
| 2,821,853,730,584,280,600 | 36.072727 | 100 | 0.615498 | false | 3.153906 | false | false | false |